author     S. Solomon Darnell   2025-03-28 21:52:21 -0500
committer  S. Solomon Darnell   2025-03-28 21:52:21 -0500
commit     4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
tree       ee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio
parent     cc961e04ba734dd72309fb548a2f97d67d578813 (diff)
two versions of R2R are here (HEAD, master)
Diffstat (limited to '.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio')
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/__init__.py                                    15
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_azure_machine_learning_workspaces.py         110
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_configuration.py                              60
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_patch.py                                      31
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/__init__.py                         27
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_delete_operations.py              173
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_events_operations.py              480
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_experiments_operations.py         621
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_metric_operations.py              875
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_run_artifacts_operations.py      1236
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_run_operations.py                 168
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_runs_operations.py               2674
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_spans_operations.py               302
13 files changed, 6772 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/__init__.py
new file mode 100644
index 00000000..f67ccda9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/__init__.py
@@ -0,0 +1,15 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces
+__all__ = ['AzureMachineLearningWorkspaces']
+
+# `._patch.py` is used for handwritten extensions to the generated code
+# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
+from ._patch import patch_sdk
+patch_sdk()
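
The package __init__ above re-exports the async client and runs patch_sdk() at import time. A minimal import sketch, assuming the package is importable from the vendored path shown in the diff:

    from azure.ai.ml._restclient.runhistory.aio import AzureMachineLearningWorkspaces
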
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_azure_machine_learning_workspaces.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_azure_machine_learning_workspaces.py
new file mode 100644
index 00000000..92b775fb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_azure_machine_learning_workspaces.py
@@ -0,0 +1,110 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from copy import deepcopy
+from typing import Any, Awaitable, TYPE_CHECKING
+
+from msrest import Deserializer, Serializer
+
+from azure.core.rest import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core import AsyncARMPipelineClient
+
+from .. import models
+from ._configuration import AzureMachineLearningWorkspacesConfiguration
+from .operations import DeleteOperations, EventsOperations, ExperimentsOperations, MetricOperations, RunArtifactsOperations, RunOperations, RunsOperations, SpansOperations
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from azure.core.credentials_async import AsyncTokenCredential
+
+class AzureMachineLearningWorkspaces:    # pylint: disable=too-many-instance-attributes
+    """AzureMachineLearningWorkspaces.
+
+    :ivar delete: DeleteOperations operations
+    :vartype delete: azure.mgmt.machinelearningservices.aio.operations.DeleteOperations
+    :ivar events: EventsOperations operations
+    :vartype events: azure.mgmt.machinelearningservices.aio.operations.EventsOperations
+    :ivar experiments: ExperimentsOperations operations
+    :vartype experiments: azure.mgmt.machinelearningservices.aio.operations.ExperimentsOperations
+    :ivar metric: MetricOperations operations
+    :vartype metric: azure.mgmt.machinelearningservices.aio.operations.MetricOperations
+    :ivar runs: RunsOperations operations
+    :vartype runs: azure.mgmt.machinelearningservices.aio.operations.RunsOperations
+    :ivar run_artifacts: RunArtifactsOperations operations
+    :vartype run_artifacts:
+     azure.mgmt.machinelearningservices.aio.operations.RunArtifactsOperations
+    :ivar run: RunOperations operations
+    :vartype run: azure.mgmt.machinelearningservices.aio.operations.RunOperations
+    :ivar spans: SpansOperations operations
+    :vartype spans: azure.mgmt.machinelearningservices.aio.operations.SpansOperations
+    :param credential: Credential needed for the client to connect to Azure.
+    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+    :param base_url: Service URL. Default value is ''.
+    :type base_url: str
+    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+     Retry-After header is present.
+    """
+
+    def __init__(
+        self,
+        credential: "AsyncTokenCredential",
+        base_url: str = "",
+        **kwargs: Any
+    ) -> None:
+        self._config = AzureMachineLearningWorkspacesConfiguration(credential=credential, **kwargs)
+        self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+        self._serialize = Serializer(client_models)
+        self._deserialize = Deserializer(client_models)
+        self._serialize.client_side_validation = False
+        self.delete = DeleteOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.events = EventsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.experiments = ExperimentsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.metric = MetricOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.runs = RunsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.run_artifacts = RunArtifactsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.run = RunOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.spans = SpansOperations(self._client, self._config, self._serialize, self._deserialize)
+
+
+    def _send_request(
+        self,
+        request: HttpRequest,
+        **kwargs: Any
+    ) -> Awaitable[AsyncHttpResponse]:
+        """Runs the network request through the client's chained policies.
+
+        >>> from azure.core.rest import HttpRequest
+        >>> request = HttpRequest("GET", "https://www.example.org/")
+        <HttpRequest [GET], url: 'https://www.example.org/'>
+        >>> response = await client._send_request(request)
+        <AsyncHttpResponse: 200 OK>
+
+        For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart
+
+        :param request: The network request you want to make. Required.
+        :type request: ~azure.core.rest.HttpRequest
+        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
+        :return: The response of your network call. Does not do error handling on your response.
+        :rtype: ~azure.core.rest.AsyncHttpResponse
+        """
+
+        request_copy = deepcopy(request)
+        request_copy.url = self._client.format_url(request_copy.url)
+        return self._client.send_request(request_copy, **kwargs)
+
+    async def close(self) -> None:
+        await self._client.close()
+
+    async def __aenter__(self) -> "AzureMachineLearningWorkspaces":
+        await self._client.__aenter__()
+        return self
+
+    async def __aexit__(self, *exc_details) -> None:
+        await self._client.__aexit__(*exc_details)
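
A minimal usage sketch for the client defined above, assuming azure-identity's async DefaultAzureCredential and a placeholder run-history endpoint (the real base_url depends on the workspace region); the _send_request call mirrors the docstring example:

    import asyncio

    from azure.core.rest import HttpRequest
    from azure.identity.aio import DefaultAzureCredential

    from azure.ai.ml._restclient.runhistory.aio import AzureMachineLearningWorkspaces

    async def main() -> None:
        credential = DefaultAzureCredential()
        # Placeholder endpoint; substitute the workspace's regional service URL.
        async with AzureMachineLearningWorkspaces(credential, base_url="https://<region>.api.azureml.ms") as client:
            request = HttpRequest("GET", "https://www.example.org/")
            response = await client._send_request(request)
            print(response.status_code)
        await credential.close()

    asyncio.run(main())
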
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_configuration.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_configuration.py
new file mode 100644
index 00000000..6d0b0e4d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_configuration.py
@@ -0,0 +1,60 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, TYPE_CHECKING
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy
+
+from .._version import VERSION
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from azure.core.credentials_async import AsyncTokenCredential
+
+
+class AzureMachineLearningWorkspacesConfiguration(Configuration):  # pylint: disable=too-many-instance-attributes
+    """Configuration for AzureMachineLearningWorkspaces.
+
+    Note that all parameters used to create this instance are saved as instance
+    attributes.
+
+    :param credential: Credential needed for the client to connect to Azure.
+    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+    """
+
+    def __init__(
+        self,
+        credential: "AsyncTokenCredential",
+        **kwargs: Any
+    ) -> None:
+        super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs)
+        if credential is None:
+            raise ValueError("Parameter 'credential' must not be None.")
+
+        self.credential = credential
+        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
+        kwargs.setdefault('sdk_moniker', 'mgmt-machinelearningservices/{}'.format(VERSION))
+        self._configure(**kwargs)
+
+    def _configure(
+        self,
+        **kwargs: Any
+    ) -> None:
+        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
+        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
+        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
+        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+        self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
+        self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
+        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
+        self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
+        self.authentication_policy = kwargs.get('authentication_policy')
+        if self.credential and not self.authentication_policy:
+            self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs)
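
Because the client forwards **kwargs into this configuration, azure-core policy options can be overridden at construction time. A hedged sketch, reusing the credential and client import from the sketch above (the endpoint is a placeholder and the scope shown is the configuration's default):

    client = AzureMachineLearningWorkspaces(
        credential,
        base_url="https://<region>.api.azureml.ms",                   # placeholder endpoint
        credential_scopes=["https://management.azure.com/.default"],  # default shown above
        retry_total=5,           # consumed by policies.AsyncRetryPolicy
        logging_enable=False,    # consumed by policies.NetworkTraceLoggingPolicy
    )
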
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_patch.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_patch.py
new file mode 100644
index 00000000..74e48ecd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_patch.py
@@ -0,0 +1,31 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the ""Software""), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+
+# This file is used for handwritten extensions to the generated code. Example:
+# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
+def patch_sdk():
+    pass
\ No newline at end of file
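
patch_sdk() above is an empty hook for handwritten customizations. A purely hypothetical sketch of such a patch (the list_operation_groups helper is invented for illustration and is not part of the generated code):

    def patch_sdk():
        from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces

        def list_operation_groups(self):
            # Hypothetical helper: names of the operation groups attached in __init__.
            return ["delete", "events", "experiments", "metric", "runs",
                    "run_artifacts", "run", "spans"]

        AzureMachineLearningWorkspaces.list_operation_groups = list_operation_groups
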
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/__init__.py
new file mode 100644
index 00000000..3e84a44a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/__init__.py
@@ -0,0 +1,27 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._delete_operations import DeleteOperations
+from ._events_operations import EventsOperations
+from ._experiments_operations import ExperimentsOperations
+from ._metric_operations import MetricOperations
+from ._runs_operations import RunsOperations
+from ._run_artifacts_operations import RunArtifactsOperations
+from ._run_operations import RunOperations
+from ._spans_operations import SpansOperations
+
+__all__ = [
+    'DeleteOperations',
+    'EventsOperations',
+    'ExperimentsOperations',
+    'MetricOperations',
+    'RunsOperations',
+    'RunArtifactsOperations',
+    'RunOperations',
+    'SpansOperations',
+]
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_delete_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_delete_operations.py
new file mode 100644
index 00000000..6841ffdc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_delete_operations.py
@@ -0,0 +1,173 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Optional, TypeVar
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._delete_operations import build_get_configuration_request, build_patch_configuration_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class DeleteOperations:
+    """DeleteOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace_async
+    async def patch_configuration(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.DeleteConfiguration"] = None,
+        **kwargs: Any
+    ) -> "_models.DeleteConfiguration":
+        """patch_configuration.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DeleteConfiguration
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DeleteConfiguration, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DeleteConfiguration
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeleteConfiguration"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DeleteConfiguration')
+        else:
+            _json = None
+
+        request = build_patch_configuration_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.patch_configuration.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DeleteConfiguration', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    patch_configuration.metadata = {'url': "/history/v1.0/private/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/deleteConfiguration"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_configuration(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs: Any
+    ) -> "_models.DeleteConfiguration":
+        """get_configuration.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DeleteConfiguration, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DeleteConfiguration
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeleteConfiguration"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_configuration_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            template_url=self.get_configuration.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DeleteConfiguration', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_configuration.metadata = {'url': "/history/v1.0/private/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/deleteConfiguration"}  # type: ignore
+
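A hedged sketch of calling the DeleteOperations group shown above from the client built earlier; subscription, resource group, and workspace names are placeholders:

    config = await client.delete.get_configuration(
        subscription_id="<subscription-id>",
        resource_group_name="<resource-group>",
        workspace_name="<workspace-name>",
    )
    config = await client.delete.patch_configuration(
        subscription_id="<subscription-id>",
        resource_group_name="<resource-group>",
        workspace_name="<workspace-name>",
        body=config,  # a DeleteConfiguration model; the fetched one is reused here
    )
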
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_events_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_events_operations.py
new file mode 100644
index 00000000..03dc9d42
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_events_operations.py
@@ -0,0 +1,480 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Optional, TypeVar
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._events_operations import build_batch_post_by_experiment_id_request, build_batch_post_by_experiment_name_request, build_batch_post_request, build_post_by_experiment_id_request, build_post_by_experiment_name_request, build_post_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class EventsOperations:
+    """EventsOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace_async
+    async def batch_post_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_name: str,
+        body: Optional["_models.BatchEventCommand"] = None,
+        **kwargs: Any
+    ) -> "_models.BatchEventCommandResult":
+        """batch_post_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchEventCommand
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchEventCommandResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchEventCommandResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchEventCommandResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchEventCommand')
+        else:
+            _json = None
+
+        request = build_batch_post_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_post_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchEventCommandResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_post_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/batch/events"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def batch_post_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_id: str,
+        body: Optional["_models.BatchEventCommand"] = None,
+        **kwargs: Any
+    ) -> "_models.BatchEventCommandResult":
+        """batch_post_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchEventCommand
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchEventCommandResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchEventCommandResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchEventCommandResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchEventCommand')
+        else:
+            _json = None
+
+        request = build_batch_post_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_post_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchEventCommandResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_post_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/batch/events"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def batch_post(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.BatchEventCommand"] = None,
+        **kwargs: Any
+    ) -> "_models.BatchEventCommandResult":
+        """batch_post.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchEventCommand
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchEventCommandResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchEventCommandResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchEventCommandResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchEventCommand')
+        else:
+            _json = None
+
+        request = build_batch_post_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_post.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchEventCommandResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_post.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batch/events"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def post_by_experiment_name(  # pylint: disable=inconsistent-return-statements
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        body: Optional["_models.BaseEvent"] = None,
+        **kwargs: Any
+    ) -> None:
+        """post_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BaseEvent
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BaseEvent')
+        else:
+            _json = None
+
+        request = build_post_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.post_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    post_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/events"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def post_by_experiment_id(  # pylint: disable=inconsistent-return-statements
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        body: Optional["_models.BaseEvent"] = None,
+        **kwargs: Any
+    ) -> None:
+        """post_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BaseEvent
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BaseEvent')
+        else:
+            _json = None
+
+        request = build_post_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.post_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    post_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/events"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def post(  # pylint: disable=inconsistent-return-statements
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        body: Optional["_models.BaseEvent"] = None,
+        **kwargs: Any
+    ) -> None:
+        """post.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BaseEvent
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BaseEvent')
+        else:
+            _json = None
+
+        request = build_post_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.post.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    post.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/events"}  # type: ignore
+
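A hedged sketch of posting run events with the EventsOperations group above; the ids are placeholders, and event / batch_command stand for a _models.BaseEvent and a _models.BatchEventCommand built by the caller:

    await client.events.post(
        subscription_id="<subscription-id>",
        resource_group_name="<resource-group>",
        workspace_name="<workspace-name>",
        run_id="<run-id>",
        body=event,
    )
    result = await client.events.batch_post(
        subscription_id="<subscription-id>",
        resource_group_name="<resource-group>",
        workspace_name="<workspace-name>",
        body=batch_command,
    )
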
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_experiments_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_experiments_operations.py
new file mode 100644
index 00000000..e1831c4c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_experiments_operations.py
@@ -0,0 +1,621 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._experiments_operations import build_create_request, build_delete_request_initial, build_delete_tags_request, build_get_by_id_request, build_get_by_query_request, build_get_request, build_update_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class ExperimentsOperations:
+    """ExperimentsOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace_async
+    async def get(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_name: str,
+        **kwargs: Any
+    ) -> "_models.Experiment":
+        """Get details of an Experiment.
+
+        Get details of an Experiment with specific Experiment name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_name: The experiment name.
+        :type experiment_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Experiment, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Experiment
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Experiment"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_name=experiment_name,
+            template_url=self.get.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Experiment', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def create(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_name: str,
+        **kwargs: Any
+    ) -> "_models.Experiment":
+        """Create an Experiment.
+
+        Create a new Experiment.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_name: The experiment name.
+        :type experiment_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Experiment, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Experiment
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Experiment"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_create_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_name=experiment_name,
+            template_url=self.create.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Experiment', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    create.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_by_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_id: str,
+        **kwargs: Any
+    ) -> "_models.Experiment":
+        """Get details of an Experiment.
+
+        Get details of an Experiment with specific Experiment Id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id: The identifier of the experiment.
+        :type experiment_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Experiment, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Experiment
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Experiment"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_by_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            template_url=self.get_by_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Experiment', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_by_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}"}  # type: ignore
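+
+    # --- Editor's illustrative sketch (not part of the generated client) ---
+    # Hypothetical use of `get_by_id`, assuming the same `client.experiments`
+    # operation group as above; all identifiers are placeholders.
+    #
+    #     experiment = await client.experiments.get_by_id(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace-name>",
+    #         experiment_id="<experiment-id>",
+    #     )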
+
+
+    @distributed_trace_async
+    async def update(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_id: str,
+        body: Optional["_models.ModifyExperiment"] = None,
+        **kwargs: Any
+    ) -> "_models.Experiment":
+        """Update details of an Experiment.
+
+        Update details of an Experiment with specific Experiment Id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id: The identifier of the experiment.
+        :type experiment_id: str
+        :param body: Experiment details which need to be updated.
+        :type body: ~azure.mgmt.machinelearningservices.models.ModifyExperiment
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Experiment, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Experiment
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Experiment"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'ModifyExperiment')
+        else:
+            _json = None
+
+        request = build_update_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.update.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Experiment', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    update.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}"}  # type: ignore
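+
+    # --- Editor's illustrative sketch (not part of the generated client) ---
+    # Hypothetical use of `update`; the ModifyExperiment fields are not shown in
+    # this file, so the request body is assumed to be built elsewhere.
+    #
+    #     modify = _models.ModifyExperiment()  # fields omitted; model shape assumed
+    #     experiment = await client.experiments.update(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace-name>",
+    #         experiment_id="<experiment-id>",
+    #         body=modify,
+    #     )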
+
+
+    async def _delete_initial(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_id: str,
+        **kwargs: Any
+    ) -> Any:
+        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_delete_request_initial(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            template_url=self._delete_initial.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('object', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    _delete_initial.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def begin_delete(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_id: str,
+        **kwargs: Any
+    ) -> AsyncLROPoller[Any]:
+        """Delete an Experiment.
+
+        Delete an existing empty Experiment.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id: The identifier of the experiment.
+        :type experiment_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+         this operation to not poll, or pass in your own initialized polling object for a personal
+         polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either any or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[any]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            raw_result = await self._delete_initial(
+                subscription_id=subscription_id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                experiment_id=experiment_id,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+        kwargs.pop('error_map', None)
+
+        def get_long_running_output(pipeline_response):
+            response = pipeline_response.http_response
+            deserialized = self._deserialize('object', pipeline_response)
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+
+        if polling is True:
+            polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
+        elif polling is False:
+            polling_method = AsyncNoPolling()
+        else:
+            polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+
+    begin_delete.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}"}  # type: ignore
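+
+    # --- Editor's illustrative sketch (not part of the generated client) ---
+    # Hypothetical use of the long-running `begin_delete` operation: it returns
+    # an AsyncLROPoller, so the final result is obtained through the poller.
+    # Client/operation-group names and identifiers are placeholders.
+    #
+    #     poller = await client.experiments.begin_delete(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace-name>",
+    #         experiment_id="<experiment-id>",
+    #     )
+    #     result = await poller.result()  # final LRO output (deserialized as 'object')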
+
+    @distributed_trace
+    def get_by_query(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        url_safe_experiment_names_only: Optional[bool] = True,
+        body: Optional["_models.ExperimentQueryParams"] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedExperimentList"]:
+        """Get all Experiments in a specific workspace.
+
+        Get all experiments in a specific workspace with the specified query filters.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param url_safe_experiment_names_only:
+        :type url_safe_experiment_names_only: bool
+        :param body: Query parameters for data sorting and filtering.
+        :type body: ~azure.mgmt.machinelearningservices.models.ExperimentQueryParams
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either PaginatedExperimentList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedExperimentList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedExperimentList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                if body is not None:
+                    _json = self._serialize.body(body, 'ExperimentQueryParams')
+                else:
+                    _json = None
+                
+                request = build_get_by_query_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    content_type=content_type,
+                    json=_json,
+                    url_safe_experiment_names_only=url_safe_experiment_names_only,
+                    template_url=self.get_by_query.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                if body is not None:
+                    _json = self._serialize.body(body, 'ExperimentQueryParams')
+                else:
+                    _json = None
+                
+                request = build_get_by_query_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    content_type=content_type,
+                    json=_json,
+                    url_safe_experiment_names_only=url_safe_experiment_names_only,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedExperimentList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    get_by_query.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments:query"}  # type: ignore
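+
+    # --- Editor's illustrative sketch (not part of the generated client) ---
+    # Hypothetical use of `get_by_query`: the method itself is not awaited; it
+    # returns an AsyncItemPaged that is consumed with `async for`, and each
+    # iterated item is an element of a page's `value` list. Names are placeholders.
+    #
+    #     pager = client.experiments.get_by_query(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace-name>",
+    #         body=None,  # or a _models.ExperimentQueryParams with filters
+    #     )
+    #     async for experiment in pager:
+    #         ...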
+
+    @distributed_trace_async
+    async def delete_tags(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_id: str,
+        body: Optional["_models.DeleteTagsCommand"] = None,
+        **kwargs: Any
+    ) -> "_models.DeleteExperimentTagsResult":
+        """Delete list of Tags in an Experiment.
+
+        Delete list of Tags from a specific Experiment Id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id: The identifier of the experiment.
+        :type experiment_id: str
+        :param body: The requested list of tags to be deleted.
+        :type body: ~azure.mgmt.machinelearningservices.models.DeleteTagsCommand
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DeleteExperimentTagsResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DeleteExperimentTagsResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeleteExperimentTagsResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DeleteTagsCommand')
+        else:
+            _json = None
+
+        request = build_delete_tags_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.delete_tags.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DeleteExperimentTagsResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    delete_tags.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/tags:delete"}  # type: ignore
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_metric_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_metric_operations.py
new file mode 100644
index 00000000..0fed06c3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_metric_operations.py
@@ -0,0 +1,875 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._metric_operations import build_delete_metrics_by_data_container_id_request_initial, build_delete_metrics_by_run_id_request_initial, build_get_full_fidelity_metric_request, build_get_metric_details_by_experiment_id_request, build_get_metric_details_by_experiment_name_request, build_get_sampled_metric_request, build_list_generic_resource_metrics_request, build_list_metric_request, build_post_run_metrics_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class MetricOperations:
+    """MetricOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace_async
+    async def get_full_fidelity_metric(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        body: Optional["_models.RetrieveFullFidelityMetricRequest"] = None,
+        **kwargs: Any
+    ) -> "_models.MetricV2":
+        """API to retrieve the full-fidelity sequence associated with a particular run and metricName.
+
+        API to retrieve the full-fidelity sequence associated with a particular run and metricName.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.RetrieveFullFidelityMetricRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: MetricV2, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.MetricV2
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.MetricV2"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'RetrieveFullFidelityMetricRequest')
+        else:
+            _json = None
+
+        request = build_get_full_fidelity_metric_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.get_full_fidelity_metric.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('MetricV2', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_full_fidelity_metric.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/full"}  # type: ignore
+
+
+    @distributed_trace
+    def list_metric(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        body: Optional["_models.ListMetrics"] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedMetricDefinitionList"]:
+        """API to list metric for a particular datacontainer and metricName.
+
+        API to list metric for a particular datacontainer and metricName.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.ListMetrics
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either PaginatedMetricDefinitionList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedMetricDefinitionList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedMetricDefinitionList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                if body is not None:
+                    _json = self._serialize.body(body, 'ListMetrics')
+                else:
+                    _json = None
+                
+                request = build_list_metric_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=self.list_metric.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                if body is not None:
+                    _json = self._serialize.body(body, 'ListMetrics')
+                else:
+                    _json = None
+                
+                request = build_list_metric_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedMetricDefinitionList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_metric.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/list"}  # type: ignore
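+
+    # --- Editor's illustrative sketch (not part of the generated client) ---
+    # Hypothetical paging over `list_metric`, assuming the client exposes this
+    # operation group as `metric`; identifiers are placeholders.
+    #
+    #     async for definition in client.metric.list_metric(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace-name>",
+    #         run_id="<run-id>",
+    #         body=None,  # or a _models.ListMetrics request body
+    #     ):
+    #         ...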
+
+    @distributed_trace
+    def list_generic_resource_metrics(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.ListGenericResourceMetrics"] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedMetricDefinitionList"]:
+        """API to list workspace/subworkspace resource metrics for a particular ResourceId.
+
+        API to list workspace/subworkspace resource metrics for a particular ResourceId.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.ListGenericResourceMetrics
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either PaginatedMetricDefinitionList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedMetricDefinitionList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedMetricDefinitionList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                if body is not None:
+                    _json = self._serialize.body(body, 'ListGenericResourceMetrics')
+                else:
+                    _json = None
+                
+                request = build_list_generic_resource_metrics_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=self.list_generic_resource_metrics.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                if body is not None:
+                    _json = self._serialize.body(body, 'ListGenericResourceMetrics')
+                else:
+                    _json = None
+                
+                request = build_list_generic_resource_metrics_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedMetricDefinitionList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_generic_resource_metrics.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/azuremonitor/list"}  # type: ignore
+
+    @distributed_trace_async
+    async def get_sampled_metric(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        body: Optional["_models.GetSampledMetricRequest"] = None,
+        **kwargs: Any
+    ) -> "_models.MetricSample":
+        """Stub for future action
+        API to retrieve samples for one or many runs to compare a given metricName
+        Throw if schemas don't match across metrics.
+
+        Stub for future action
+        API to retrieve samples for one or many runs to compare a given metricName
+        Throw if schemas don't match across metrics.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.GetSampledMetricRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: MetricSample, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.MetricSample
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.MetricSample"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'GetSampledMetricRequest')
+        else:
+            _json = None
+
+        request = build_get_sampled_metric_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.get_sampled_metric.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('MetricSample', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_sampled_metric.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/sample"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def post_run_metrics(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        body: Optional["_models.BatchIMetricV2"] = None,
+        **kwargs: Any
+    ) -> "_models.PostRunMetricsResult":
+        """Post Metrics to a Run.
+
+        Post Metrics to a specific Run Id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchIMetricV2
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: PostRunMetricsResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.PostRunMetricsResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PostRunMetricsResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchIMetricV2')
+        else:
+            _json = None
+
+        request = build_post_run_metrics_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.post_run_metrics.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 207]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if response.status_code == 200:
+            deserialized = self._deserialize('PostRunMetricsResult', pipeline_response)
+
+        if response.status_code == 207:
+            deserialized = self._deserialize('PostRunMetricsResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    post_run_metrics.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/batch"}  # type: ignore
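+
+    # --- Editor's illustrative sketch (not part of the generated client) ---
+    # Hypothetical use of `post_run_metrics`; the BatchIMetricV2 payload shape is
+    # not shown in this file and is assumed to be built elsewhere. The service may
+    # answer 200 or 207 (partial success), both deserialized as PostRunMetricsResult.
+    #
+    #     batch = _models.BatchIMetricV2()  # metric values omitted; model shape assumed
+    #     result = await client.metric.post_run_metrics(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace-name>",
+    #         run_id="<run-id>",
+    #         body=batch,
+    #     )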
+
+
+    async def _delete_metrics_by_data_container_id_initial(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_id: str,
+        data_container_id: str,
+        **kwargs: Any
+    ) -> Any:
+        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_delete_metrics_by_data_container_id_request_initial(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            data_container_id=data_container_id,
+            template_url=self._delete_metrics_by_data_container_id_initial.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('object', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    _delete_metrics_by_data_container_id_initial.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentId}/containers/{dataContainerId}/deleteMetrics"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def begin_delete_metrics_by_data_container_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_id: str,
+        data_container_id: str,
+        **kwargs: Any
+    ) -> AsyncLROPoller[Any]:
+        """API to delete metrics by data container id.
+
+        API to delete metrics by data container id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param data_container_id:
+        :type data_container_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+         this operation to not poll, or pass in your own initialized polling object for a personal
+         polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either any or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[any]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            raw_result = await self._delete_metrics_by_data_container_id_initial(
+                subscription_id=subscription_id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                experiment_id=experiment_id,
+                data_container_id=data_container_id,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+        kwargs.pop('error_map', None)
+
+        def get_long_running_output(pipeline_response):
+            response = pipeline_response.http_response
+            deserialized = self._deserialize('object', pipeline_response)
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+
+        if polling is True:
+            polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
+        elif polling is False:
+            polling_method = AsyncNoPolling()
+        else:
+            polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+
+    begin_delete_metrics_by_data_container_id.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentId}/containers/{dataContainerId}/deleteMetrics"}  # type: ignore
+
+    async def _delete_metrics_by_run_id_initial(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        **kwargs: Any
+    ) -> Any:
+        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_delete_metrics_by_run_id_request_initial(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            template_url=self._delete_metrics_by_run_id_initial.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('object', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    _delete_metrics_by_run_id_initial.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/deleteMetrics"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def begin_delete_metrics_by_run_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        **kwargs: Any
+    ) -> AsyncLROPoller[Any]:
+        """API to delete metrics by run id.
+
+        API to delete metrics by run id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+         this operation to not poll, or pass in your own initialized polling object for a personal
+         polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either any or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[any]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            raw_result = await self._delete_metrics_by_run_id_initial(
+                subscription_id=subscription_id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                run_id=run_id,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+        kwargs.pop('error_map', None)
+
+        def get_long_running_output(pipeline_response):
+            response = pipeline_response.http_response
+            deserialized = self._deserialize('object', pipeline_response)
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+
+        if polling is True:
+            polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
+        elif polling is False:
+            polling_method = AsyncNoPolling()
+        else:
+            polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+
+    begin_delete_metrics_by_run_id.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/deleteMetrics"}  # type: ignore
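+
+    # --- Editor's illustrative sketch (not part of the generated client) ---
+    # Hypothetical use of the long-running metric deletion for a run; as with the
+    # other pollers above, the final output is obtained from the AsyncLROPoller.
+    # Operation-group attribute name and identifiers are placeholders.
+    #
+    #     poller = await client.metric.begin_delete_metrics_by_run_id(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace-name>",
+    #         run_id="<run-id>",
+    #     )
+    #     await poller.result()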
+
+    @distributed_trace_async
+    async def get_metric_details_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        metric_id: str,
+        experiment_name: str,
+        **kwargs: Any
+    ) -> "_models.RunMetric":
+        """Get Metric details.
+
+        Get Metric details for a specific Metric Id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param metric_id: The identifier for a Metric.
+        :type metric_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: RunMetric, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.RunMetric
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RunMetric"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_metric_details_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            metric_id=metric_id,
+            experiment_name=experiment_name,
+            template_url=self.get_metric_details_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('RunMetric', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_metric_details_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/metrics/{metricId}"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_metric_details_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        metric_id: str,
+        experiment_id: str,
+        **kwargs: Any
+    ) -> "_models.RunMetric":
+        """Get Metric details.
+
+        Get Metric details for a specific Metric Id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param metric_id: The identifier for a Metric.
+        :type metric_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: RunMetric, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.RunMetric
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RunMetric"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_metric_details_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            metric_id=metric_id,
+            experiment_id=experiment_id,
+            template_url=self.get_metric_details_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('RunMetric', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_metric_details_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/metrics/{metricId}"}  # type: ignore
+
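For orientation, a minimal usage sketch of the async metric operation defined above. This is a hedged illustration, not part of the vendored file: the client constructor arguments, the `metric` operation-group attribute name, the use of azure-identity's async DefaultAzureCredential, and every resource identifier below are assumptions; only get_metric_details_by_experiment_id and its parameters come from the generated code shown here.

import asyncio

from azure.identity.aio import DefaultAzureCredential

from azure.ai.ml._restclient.runhistory.aio import AzureMachineLearningWorkspaces


async def fetch_metric() -> None:
    credential = DefaultAzureCredential()
    # Assumed constructor arguments; the generated _configuration.py defines the real ones.
    client = AzureMachineLearningWorkspaces(credential=credential, base_url="https://example.azureml.net")
    async with client, credential:
        # Assumed attribute name ("metric") for the MetricOperations group on the client.
        run_metric = await client.metric.get_metric_details_by_experiment_id(
            subscription_id="00000000-0000-0000-0000-000000000000",
            resource_group_name="my-resource-group",
            workspace_name="my-workspace",
            metric_id="my-metric-id",
            experiment_id="my-experiment-id",
        )
        # The deserialized RunMetric model is returned unless a custom `cls` callback is supplied.
        print(run_metric)


asyncio.run(fetch_metric())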
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_run_artifacts_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_run_artifacts_operations.py
new file mode 100644
index 00000000..3710e381
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_run_artifacts_operations.py
@@ -0,0 +1,1236 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._run_artifacts_operations import build_batch_create_empty_artifacts_by_experiment_id_request, build_batch_create_empty_artifacts_by_experiment_name_request, build_get_by_id_by_experiment_id_request, build_get_by_id_by_experiment_name_request, build_get_content_information_by_experiment_id_request, build_get_content_information_by_experiment_name_request, build_get_sas_uri_by_experiment_id_request, build_get_sas_uri_by_experiment_name_request, build_list_in_container_by_experiment_id_request, build_list_in_container_by_experiment_name_request, build_list_in_path_by_experiment_id_request, build_list_in_path_by_experiment_name_request, build_list_sas_by_prefix_by_experiment_id_request, build_list_sas_by_prefix_by_experiment_name_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class RunArtifactsOperations:
+    """RunArtifactsOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace
+    def list_in_container_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        continuation_token_parameter: Optional[str] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedArtifactList"]:
+        """list_in_container_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedArtifactList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedArtifactList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedArtifactList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_in_container_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.list_in_container_by_experiment_name.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_in_container_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedArtifactList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_in_container_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts"}  # type: ignore
+
+    @distributed_trace
+    def list_in_container_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        continuation_token_parameter: Optional[str] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedArtifactList"]:
+        """list_in_container_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedArtifactList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedArtifactList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedArtifactList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_in_container_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.list_in_container_by_experiment_id.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_in_container_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedArtifactList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_in_container_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts"}  # type: ignore
+
+    @distributed_trace
+    def list_in_path_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        path: Optional[str] = None,
+        continuation_token_parameter: Optional[str] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedArtifactList"]:
+        """list_in_path_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param path:
+        :type path: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedArtifactList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedArtifactList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedArtifactList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_in_path_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.list_in_path_by_experiment_name.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_in_path_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedArtifactList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_in_path_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/path"}  # type: ignore
+
+    @distributed_trace
+    def list_in_path_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        path: Optional[str] = None,
+        continuation_token_parameter: Optional[str] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedArtifactList"]:
+        """list_in_path_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param path:
+        :type path: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedArtifactList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedArtifactList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedArtifactList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_in_path_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.list_in_path_by_experiment_id.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_in_path_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedArtifactList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_in_path_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/path"}  # type: ignore
+
+    @distributed_trace_async
+    async def get_by_id_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        path: Optional[str] = None,
+        **kwargs: Any
+    ) -> "_models.Artifact":
+        """get_by_id_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param path:
+        :type path: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Artifact, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Artifact
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Artifact"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_by_id_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            path=path,
+            template_url=self.get_by_id_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Artifact', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_by_id_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/metadata"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_by_id_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        path: Optional[str] = None,
+        **kwargs: Any
+    ) -> "_models.Artifact":
+        """get_by_id_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param path:
+        :type path: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Artifact, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Artifact
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Artifact"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_by_id_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            path=path,
+            template_url=self.get_by_id_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Artifact', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_by_id_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/metadata"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_content_information_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        path: Optional[str] = None,
+        **kwargs: Any
+    ) -> "_models.ArtifactContentInformation":
+        """get_content_information_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param path:
+        :type path: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ArtifactContentInformation, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.ArtifactContentInformation
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ArtifactContentInformation"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_content_information_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            path=path,
+            template_url=self.get_content_information_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ArtifactContentInformation', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_content_information_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/contentinfo"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_content_information_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        path: Optional[str] = None,
+        **kwargs: Any
+    ) -> "_models.ArtifactContentInformation":
+        """get_content_information_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param path:
+        :type path: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ArtifactContentInformation, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.ArtifactContentInformation
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ArtifactContentInformation"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_content_information_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            path=path,
+            template_url=self.get_content_information_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ArtifactContentInformation', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_content_information_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/contentinfo"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_sas_uri_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        path: Optional[str] = None,
+        **kwargs: Any
+    ) -> str:
+        """get_sas_uri_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param path:
+        :type path: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: str, or the result of cls(response)
+        :rtype: str
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[str]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_sas_uri_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            path=path,
+            template_url=self.get_sas_uri_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('str', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_sas_uri_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/artifacturi"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_sas_uri_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        path: Optional[str] = None,
+        **kwargs: Any
+    ) -> str:
+        """get_sas_uri_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param path:
+        :type path: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: str, or the result of cls(response)
+        :rtype: str
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[str]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_sas_uri_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            path=path,
+            template_url=self.get_sas_uri_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('str', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_sas_uri_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/artifacturi"}  # type: ignore
+
+
+    @distributed_trace
+    def list_sas_by_prefix_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        path: Optional[str] = None,
+        continuation_token_parameter: Optional[str] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedArtifactContentInformationList"]:
+        """list_sas_by_prefix_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param path:
+        :type path: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedArtifactContentInformationList or the
+         result of cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedArtifactContentInformationList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedArtifactContentInformationList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_sas_by_prefix_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.list_sas_by_prefix_by_experiment_name.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_sas_by_prefix_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedArtifactContentInformationList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_sas_by_prefix_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/prefix/contentinfo"}  # type: ignore
+
+    @distributed_trace
+    def list_sas_by_prefix_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        path: Optional[str] = None,
+        continuation_token_parameter: Optional[str] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedArtifactContentInformationList"]:
+        """list_sas_by_prefix_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param path:
+        :type path: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedArtifactContentInformationList or the
+         result of cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedArtifactContentInformationList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedArtifactContentInformationList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_sas_by_prefix_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.list_sas_by_prefix_by_experiment_id.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_sas_by_prefix_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedArtifactContentInformationList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_sas_by_prefix_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/prefix/contentinfo"}  # type: ignore
+
+    @distributed_trace_async
+    async def batch_create_empty_artifacts_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        body: Optional["_models.ArtifactPathList"] = None,
+        **kwargs: Any
+    ) -> "_models.BatchArtifactContentInformationResult":
+        """batch_create_empty_artifacts_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.ArtifactPathList
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchArtifactContentInformationResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchArtifactContentInformationResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchArtifactContentInformationResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'ArtifactPathList')
+        else:
+            _json = None
+
+        request = build_batch_create_empty_artifacts_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_create_empty_artifacts_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchArtifactContentInformationResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_create_empty_artifacts_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/batch/metadata"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def batch_create_empty_artifacts_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        body: Optional["_models.ArtifactPathList"] = None,
+        **kwargs: Any
+    ) -> "_models.BatchArtifactContentInformationResult":
+        """batch_create_empty_artifacts_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.ArtifactPathList
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchArtifactContentInformationResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchArtifactContentInformationResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchArtifactContentInformationResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'ArtifactPathList')
+        else:
+            _json = None
+
+        request = build_batch_create_empty_artifacts_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_create_empty_artifacts_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchArtifactContentInformationResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_create_empty_artifacts_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/batch/metadata"}  # type: ignore
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_run_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_run_operations.py
new file mode 100644
index 00000000..64cbc7dd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_run_operations.py
@@ -0,0 +1,168 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, List, Optional, TypeVar, Union
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._run_operations import build_list_by_compute_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class RunOperations:
+    """RunOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace
+    def list_by_compute(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        compute_name: str,
+        filter: Optional[str] = None,
+        continuationtoken: Optional[str] = None,
+        orderby: Optional[List[str]] = None,
+        sortorder: Optional[Union[str, "_models.SortOrderDirection"]] = None,
+        top: Optional[int] = None,
+        count: Optional[bool] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedRunList"]:
+        """list_by_compute.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param compute_name:
+        :type compute_name: str
+        :param filter: Allows for filtering the collection of resources.
+         The expression specified is evaluated for each resource in the collection, and only items
+         where the expression evaluates to true are included in the response.
+        :type filter: str
+        :param continuationtoken: The continuation token to use for getting the next set of resources.
+        :type continuationtoken: str
+        :param orderby: The list of resource properties to use for sorting the requested resources.
+        :type orderby: list[str]
+        :param sortorder: The sort order of the returned resources. Not used; specify asc or desc
+         after each property name in the OrderBy parameter instead.
+        :type sortorder: str or ~azure.mgmt.machinelearningservices.models.SortOrderDirection
+        :param top: The maximum number of items in the resource collection to be included in the
+         result.
+         If not specified, all items are returned.
+        :type top: int
+        :param count: Whether to include a count of the matching resources along with the resources
+         returned in the response.
+        :type count: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedRunList or the result of cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedRunList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedRunList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_by_compute_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    compute_name=compute_name,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=self.list_by_compute.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_by_compute_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    compute_name=compute_name,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedRunList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_by_compute.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/runs"}  # type: ignore
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_runs_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_runs_operations.py
new file mode 100644
index 00000000..b42721dc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_runs_operations.py
@@ -0,0 +1,2674 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, List, Optional, TypeVar, Union
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._runs_operations import build_add_or_modify_by_experiment_id_request, build_add_or_modify_by_experiment_name_request, build_add_or_modify_experiment_request, build_add_or_modify_run_service_instances_request, build_add_request, build_batch_add_or_modify_by_experiment_id_request, build_batch_add_or_modify_by_experiment_name_request, build_batch_get_run_data_request, build_cancel_run_with_uri_by_experiment_id_request, build_cancel_run_with_uri_by_experiment_name_request, build_delete_run_services_by_experiment_id_request, build_delete_run_services_by_experiment_name_request, build_delete_run_services_request, build_delete_tags_by_experiment_id_request, build_delete_tags_by_experiment_name_request, build_delete_tags_request, build_get_by_experiment_id_request, build_get_by_experiment_name_request, build_get_by_ids_by_experiment_id_request, build_get_by_ids_by_experiment_name_request, build_get_by_query_by_experiment_id_request, build_get_by_query_by_experiment_name_request, build_get_child_by_experiment_id_request, build_get_child_by_experiment_name_request, build_get_child_request, build_get_details_by_experiment_id_request, build_get_details_by_experiment_name_request, build_get_details_request, build_get_request, build_get_run_data_request, build_get_run_service_instances_request, build_modify_or_delete_tags_by_experiment_id_request, build_modify_or_delete_tags_by_experiment_name_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class RunsOperations:  # pylint: disable=too-many-public-methods
+    """RunsOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace
+    def get_child_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        filter: Optional[str] = None,
+        continuationtoken: Optional[str] = None,
+        orderby: Optional[List[str]] = None,
+        sortorder: Optional[Union[str, "_models.SortOrderDirection"]] = None,
+        top: Optional[int] = None,
+        count: Optional[bool] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedRunList"]:
+        """get_child_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param filter: Allows for filtering the collection of resources.
+         The expression specified is evaluated for each resource in the collection, and only items
+         where the expression evaluates to true are included in the response.
+        :type filter: str
+        :param continuationtoken: The continuation token to use for getting the next set of resources.
+        :type continuationtoken: str
+        :param orderby: The list of resource properties to use for sorting the requested resources.
+        :type orderby: list[str]
+        :param sortorder: The sort order of the returned resources. Not used; specify asc or desc
+         after each property name in the OrderBy parameter instead.
+        :type sortorder: str or ~azure.mgmt.machinelearningservices.models.SortOrderDirection
+        :param top: The maximum number of items in the resource collection to be included in the
+         result.
+         If not specified, all items are returned.
+        :type top: int
+        :param count: Whether to include a count of the matching resources along with the resources
+         returned in the response.
+        :type count: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedRunList or the result of cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedRunList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedRunList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_get_child_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=self.get_child_by_experiment_name.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_get_child_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedRunList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    get_child_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/children"}  # type: ignore
+
+    @distributed_trace
+    def get_child_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        filter: Optional[str] = None,
+        continuationtoken: Optional[str] = None,
+        orderby: Optional[List[str]] = None,
+        sortorder: Optional[Union[str, "_models.SortOrderDirection"]] = None,
+        top: Optional[int] = None,
+        count: Optional[bool] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedRunList"]:
+        """get_child_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param filter: Allows for filtering the collection of resources.
+         The expression specified is evaluated for each resource in the collection, and only items
+         where the expression evaluates to true are included in the response.
+        :type filter: str
+        :param continuationtoken: The continuation token to use for getting the next set of resources.
+        :type continuationtoken: str
+        :param orderby: The list of resource properties to use for sorting the requested resources.
+        :type orderby: list[str]
+        :param sortorder: The sort order of the returned resources. Not used; specify asc or desc
+         after each property name in the OrderBy parameter instead.
+        :type sortorder: str or ~azure.mgmt.machinelearningservices.models.SortOrderDirection
+        :param top: The maximum number of items in the resource collection to be included in the
+         result.
+         If not specified, all items are returned.
+        :type top: int
+        :param count: Whether to include a count of the matching resources along with the resources
+         returned in the response.
+        :type count: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedRunList or the result of cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedRunList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedRunList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_get_child_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=self.get_child_by_experiment_id.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_get_child_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedRunList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    get_child_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/children"}  # type: ignore
+
+    @distributed_trace
+    def get_child(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        filter: Optional[str] = None,
+        continuationtoken: Optional[str] = None,
+        orderby: Optional[List[str]] = None,
+        sortorder: Optional[Union[str, "_models.SortOrderDirection"]] = None,
+        top: Optional[int] = None,
+        count: Optional[bool] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedRunList"]:
+        """get_child.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param filter: Allows for filtering the collection of resources.
+         The expression specified is evaluated for each resource in the collection, and only items
+         where the expression evaluates to true are included in the response.
+        :type filter: str
+        :param continuationtoken: The continuation token to use for getting the next set of resources.
+        :type continuationtoken: str
+        :param orderby: The list of resource properties to use for sorting the requested resources.
+        :type orderby: list[str]
+        :param sortorder: The sort order of the returned resources. Not used; specify asc or desc
+         after each property name in the OrderBy parameter instead.
+        :type sortorder: str or ~azure.mgmt.machinelearningservices.models.SortOrderDirection
+        :param top: The maximum number of items in the resource collection to be included in the
+         result.
+         If not specified, all items are returned.
+        :type top: int
+        :param count: Whether to include a count of the matching resources along with the resources
+         returned in the response.
+        :type count: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedRunList or the result of cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedRunList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedRunList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_get_child_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=self.get_child.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_get_child_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedRunList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    get_child.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/children"}  # type: ignore
+
+    @distributed_trace_async
+    async def get_details_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        **kwargs: Any
+    ) -> "_models.RunDetails":
+        """get_details_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: RunDetails, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.RunDetails
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RunDetails"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_details_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            template_url=self.get_details_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('RunDetails', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_details_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/details"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_details_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        **kwargs: Any
+    ) -> "_models.RunDetails":
+        """get_details_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: RunDetails, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.RunDetails
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RunDetails"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_details_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            template_url=self.get_details_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('RunDetails', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_details_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/details"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_details(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        **kwargs: Any
+    ) -> "_models.RunDetails":
+        """get_details.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: RunDetails, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.RunDetails
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RunDetails"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_details_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            template_url=self.get_details.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('RunDetails', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_details.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/details"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_run_data(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.GetRunDataRequest"] = None,
+        **kwargs: Any
+    ) -> "_models.GetRunDataResult":
+        """get_run_data.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.GetRunDataRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: GetRunDataResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.GetRunDataResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.GetRunDataResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'GetRunDataRequest')
+        else:
+            _json = None
+
+        request = build_get_run_data_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.get_run_data.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('GetRunDataResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_run_data.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/rundata"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def batch_get_run_data(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.BatchRequest1"] = None,
+        **kwargs: Any
+    ) -> "_models.BatchResult1":
+        """batch_get_run_data.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchRequest1
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchResult1, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchResult1
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchResult1"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchRequest1')
+        else:
+            _json = None
+
+        request = build_batch_get_run_data_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_get_run_data.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 207]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if response.status_code == 200:
+            deserialized = self._deserialize('BatchResult1', pipeline_response)
+
+        if response.status_code == 207:
+            deserialized = self._deserialize('BatchResult1', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_get_run_data.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchrundata"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def batch_add_or_modify_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_id: str,
+        body: Optional["_models.BatchAddOrModifyRunRequest"] = None,
+        **kwargs: Any
+    ) -> "_models.BatchRunResult":
+        """batch_add_or_modify_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchAddOrModifyRunRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchRunResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchRunResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchRunResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchAddOrModifyRunRequest')
+        else:
+            _json = None
+
+        request = build_batch_add_or_modify_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_add_or_modify_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchRunResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_add_or_modify_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/batch/runs"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def batch_add_or_modify_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_name: str,
+        body: Optional["_models.BatchAddOrModifyRunRequest"] = None,
+        **kwargs: Any
+    ) -> "_models.BatchRunResult":
+        """batch_add_or_modify_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchAddOrModifyRunRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchRunResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchRunResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchRunResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchAddOrModifyRunRequest')
+        else:
+            _json = None
+
+        request = build_batch_add_or_modify_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_add_or_modify_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchRunResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_add_or_modify_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/batch/runs"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def add_or_modify_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        body: Optional["_models.CreateRun"] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """add_or_modify_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.CreateRun
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'CreateRun')
+        else:
+            _json = None
+
+        request = build_add_or_modify_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.add_or_modify_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    add_or_modify_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """get_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+
+        request = build_get_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            template_url=self.get_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def add_or_modify_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        body: Optional["_models.CreateRun"] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """add_or_modify_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.CreateRun
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'CreateRun')
+        else:
+            _json = None
+
+        request = build_add_or_modify_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.add_or_modify_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    add_or_modify_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """get_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+
+        request = build_get_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            template_url=self.get_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def add_or_modify_experiment(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        body: Optional["_models.CreateRun"] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """add_or_modify_experiment.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.CreateRun
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'CreateRun')
+        else:
+            _json = None
+
+        request = build_add_or_modify_experiment_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.add_or_modify_experiment.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    add_or_modify_experiment.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def add(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        body: Optional["_models.CreateRun"] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """add.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.CreateRun
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'CreateRun')
+        else:
+            _json = None
+
+        request = build_add_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.add.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    add.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """get.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+
+        request = build_get_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            template_url=self.get.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}"}  # type: ignore
+
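+    # Sketch: ``get`` resolves a run purely by its run id, without naming the
+    # experiment; the experiment-scoped variants above reach the same run
+    # through the experiments/{experimentName} and experimentids/{experimentId}
+    # routes. ``client.runs`` is an assumed attribute name.
+    #
+    #     run = await client.runs.get(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #     )  # returns a _models.Run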
+
+    @distributed_trace_async
+    async def delete_tags_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        body: Optional[List[str]] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """delete_tags_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: list[str]
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, '[str]')
+        else:
+            _json = None
+
+        request = build_delete_tags_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.delete_tags_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    delete_tags_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/tags"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def modify_or_delete_tags_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        body: Optional["_models.DeleteOrModifyTags"] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """modify_or_delete_tags_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DeleteOrModifyTags
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DeleteOrModifyTags')
+        else:
+            _json = None
+
+        request = build_modify_or_delete_tags_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.modify_or_delete_tags_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    modify_or_delete_tags_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/tags"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def delete_tags_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        body: Optional[List[str]] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """delete_tags_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: list[str]
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, '[str]')
+        else:
+            _json = None
+
+        request = build_delete_tags_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.delete_tags_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    delete_tags_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/tags"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def modify_or_delete_tags_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        body: Optional["_models.DeleteOrModifyTags"] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """modify_or_delete_tags_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DeleteOrModifyTags
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DeleteOrModifyTags')
+        else:
+            _json = None
+
+        request = build_modify_or_delete_tags_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.modify_or_delete_tags_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    modify_or_delete_tags_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/tags"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def delete_tags(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        body: Optional[List[str]] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """delete_tags.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: list[str]
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, '[str]')
+        else:
+            _json = None
+
+        request = build_delete_tags_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.delete_tags.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    delete_tags.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/tags"}  # type: ignore
+
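+    # Sketch: the ``delete_tags*`` operations take a plain list of tag names
+    # as the body, whereas the ``modify_or_delete_tags*`` operations above
+    # take a DeleteOrModifyTags model; both families return the updated Run.
+    # ``client.runs`` is an assumed attribute name.
+    #
+    #     run = await client.runs.delete_tags(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         body=["obsolete-tag", "another-tag"],
+    #     )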
+
+    @distributed_trace_async
+    async def delete_run_services_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        body: Optional["_models.DeleteRunServices"] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """delete_run_services_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DeleteRunServices
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DeleteRunServices')
+        else:
+            _json = None
+
+        request = build_delete_run_services_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.delete_run_services_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    delete_run_services_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/services"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def delete_run_services_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        body: Optional["_models.DeleteRunServices"] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """delete_run_services_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DeleteRunServices
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DeleteRunServices')
+        else:
+            _json = None
+
+        request = build_delete_run_services_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.delete_run_services_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    delete_run_services_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/services"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def delete_run_services(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        body: Optional["_models.DeleteRunServices"] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """delete_run_services.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DeleteRunServices
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DeleteRunServices')
+        else:
+            _json = None
+
+        request = build_delete_run_services_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.delete_run_services.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    delete_run_services.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/services"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def add_or_modify_run_service_instances(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        node_id: int,
+        body: Optional["_models.AddOrModifyRunServiceInstancesRequest"] = None,
+        **kwargs: Any
+    ) -> "_models.RunServiceInstances":
+        """add_or_modify_run_service_instances.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param node_id:
+        :type node_id: int
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.AddOrModifyRunServiceInstancesRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: RunServiceInstances, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.RunServiceInstances
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RunServiceInstances"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'AddOrModifyRunServiceInstancesRequest')
+        else:
+            _json = None
+
+        request = build_add_or_modify_run_service_instances_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            node_id=node_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.add_or_modify_run_service_instances.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('RunServiceInstances', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    add_or_modify_run_service_instances.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/serviceinstances/{nodeId}"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_run_service_instances(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        node_id: int,
+        **kwargs: Any
+    ) -> "_models.RunServiceInstances":
+        """get_run_service_instances.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param node_id:
+        :type node_id: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: RunServiceInstances, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.RunServiceInstances
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RunServiceInstances"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+
+        request = build_get_run_service_instances_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            node_id=node_id,
+            template_url=self.get_run_service_instances.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('RunServiceInstances', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_run_service_instances.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/serviceinstances/{nodeId}"}  # type: ignore
+
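+    # Sketch: run service instances are addressed per run and per node, so the
+    # integer ``node_id`` is required alongside ``run_id``; the add/modify and
+    # get operations share the serviceinstances/{nodeId} route and both return
+    # a RunServiceInstances model. ``client.runs`` is an assumed attribute name.
+    #
+    #     instances = await client.runs.get_run_service_instances(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         node_id=0,
+    #     )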
+
+    @distributed_trace
+    def get_by_query_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_name: str,
+        body: Optional["_models.QueryParams"] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedRunList"]:
+        """get_by_query_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.QueryParams
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either PaginatedRunList or the result of cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedRunList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedRunList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                if body is not None:
+                    _json = self._serialize.body(body, 'QueryParams')
+                else:
+                    _json = None
+
+                request = build_get_by_query_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    experiment_name=experiment_name,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=self.get_by_query_by_experiment_name.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                if body is not None:
+                    _json = self._serialize.body(body, 'QueryParams')
+                else:
+                    _json = None
+
+                request = build_get_by_query_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    experiment_name=experiment_name,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedRunList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    get_by_query_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs:query"}  # type: ignore
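+    # The ``get_by_query_*`` operations differ from the ones above: they are
+    # plain (non-async) methods that return an AsyncItemPaged. prepare_request
+    # posts the query to the runs:query route and, for subsequent pages,
+    # re-issues the request against the next_link from each PaginatedRunList,
+    # while extract_data yields each page's ``value`` list. Usage sketch
+    # (``client.runs`` is an assumed attribute name):
+    #
+    #     pager = client.runs.get_by_query_by_experiment_name(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         experiment_name="<experiment>",
+    #         body=None,  # optionally a _models.QueryParams filter
+    #     )
+    #     async for run in pager:
+    #         ...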
+
+    @distributed_trace
+    def get_by_query_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_id: str,
+        body: Optional["_models.QueryParams"] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedRunList"]:
+        """get_by_query_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.QueryParams
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either PaginatedRunList or the result of cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedRunList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedRunList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                if body is not None:
+                    _json = self._serialize.body(body, 'QueryParams')
+                else:
+                    _json = None
+
+                request = build_get_by_query_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    experiment_id=experiment_id,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=self.get_by_query_by_experiment_id.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                if body is not None:
+                    _json = self._serialize.body(body, 'QueryParams')
+                else:
+                    _json = None
+
+                request = build_get_by_query_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    experiment_id=experiment_id,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedRunList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    get_by_query_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs:query"}  # type: ignore
+
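Because get_by_query_by_experiment_id is decorated with @distributed_trace (not the async variant) and returns an AsyncItemPaged, callers do not await it; they iterate it with `async for`, and each page triggers one service call through get_next/extract_data. A minimal usage sketch follows; the `client.runs` attribute, the client constructor keywords, and the QueryParams `filter` field are assumptions for illustration, not details confirmed by this diff.

    import asyncio

    from azure.identity.aio import DefaultAzureCredential
    from azure.ai.ml._restclient.runhistory import models
    from azure.ai.ml._restclient.runhistory.aio import AzureMachineLearningWorkspaces

    async def print_queried_runs() -> None:
        async with DefaultAzureCredential() as credential:
            # Constructor keywords are assumed; check the generated client for the exact signature.
            async with AzureMachineLearningWorkspaces(credential=credential) as client:
                pager = client.runs.get_by_query_by_experiment_id(  # "runs" attribute name assumed
                    subscription_id="<subscription-id>",
                    resource_group_name="<resource-group>",
                    workspace_name="<workspace>",
                    experiment_id="<experiment-id>",
                    body=models.QueryParams(filter="Status eq 'Completed'"),  # hypothetical filter field
                )
                async for run in pager:  # pages are fetched lazily via get_next/extract_data
                    print(run.run_id)

    asyncio.run(print_queried_runs())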
+    @distributed_trace_async
+    async def get_by_ids_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_id: str,
+        body: Optional["_models.GetRunsByIds"] = None,
+        **kwargs: Any
+    ) -> "_models.BatchRunResult":
+        """get_by_ids_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id: The identifier of the experiment that contains the runs.
+        :type experiment_id: str
+        :param body: The run ids to retrieve.
+        :type body: ~azure.mgmt.machinelearningservices.models.GetRunsByIds
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchRunResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchRunResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchRunResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'GetRunsByIds')
+        else:
+            _json = None
+
+        request = build_get_by_ids_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.get_by_ids_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchRunResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_by_ids_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/runIds"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_by_ids_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_name: str,
+        body: Optional["_models.GetRunsByIds"] = None,
+        **kwargs: Any
+    ) -> "_models.BatchRunResult":
+        """get_by_ids_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_name: The name of the experiment that contains the runs.
+        :type experiment_name: str
+        :param body: The run ids to retrieve.
+        :type body: ~azure.mgmt.machinelearningservices.models.GetRunsByIds
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchRunResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchRunResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchRunResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'GetRunsByIds')
+        else:
+            _json = None
+
+        request = build_get_by_ids_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.get_by_ids_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchRunResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_by_ids_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/runIds"}  # type: ignore
+
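By contrast, get_by_ids_by_experiment_id and get_by_ids_by_experiment_name are plain awaitables that send a GetRunsByIds body to the .../runs/runIds endpoint and return a single BatchRunResult. A sketch, reusing the `client` and `models` names from the earlier sketch; the `run_ids` field on GetRunsByIds is an assumption.

    # Inside an async function, with `client` and `models` set up as in the earlier sketch.
    result = await client.runs.get_by_ids_by_experiment_id(
        subscription_id="<subscription-id>",
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        experiment_id="<experiment-id>",
        body=models.GetRunsByIds(run_ids=["<run-id-1>", "<run-id-2>"]),  # field name assumed
    )
    # `result` is the deserialized BatchRunResult, unless a custom `cls` callback was supplied.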
+
+    @distributed_trace_async
+    async def cancel_run_with_uri_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        cancelation_reason: Optional[str] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """cancel_run_with_uri_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the run to cancel.
+        :type run_id: str
+        :param experiment_id: The identifier of the experiment that contains the run.
+        :type experiment_id: str
+        :param cancelation_reason: An optional reason recorded with the cancellation.
+        :type cancelation_reason: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_cancel_run_with_uri_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            cancelation_reason=cancelation_reason,
+            template_url=self.cancel_run_with_uri_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    cancel_run_with_uri_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/cancel"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def cancel_run_with_uri_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        cancelation_reason: Optional[str] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """cancel_run_with_uri_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the run to cancel.
+        :type run_id: str
+        :param experiment_name: The name of the experiment that contains the run.
+        :type experiment_name: str
+        :param cancelation_reason: An optional reason recorded with the cancellation.
+        :type cancelation_reason: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_cancel_run_with_uri_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            cancelation_reason=cancelation_reason,
+            template_url=self.cancel_run_with_uri_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    cancel_run_with_uri_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/cancel"}  # type: ignore
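The two cancel operations differ only in whether the experiment is addressed by id or by name; both are awaitables that return the updated Run model. A sketch against the by-name variant, with the same assumed `client.runs` attribute as in the earlier sketches.

    # Inside an async function, with `client` set up as in the earlier sketch.
    run = await client.runs.cancel_run_with_uri_by_experiment_name(
        subscription_id="<subscription-id>",
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        run_id="<run-id>",
        experiment_name="<experiment-name>",
        cancelation_reason="superseded by a newer submission",
    )
    print(run.status)  # the `status` attribute on the Run model is assumed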
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_spans_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_spans_operations.py
new file mode 100644
index 00000000..92894166
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_spans_operations.py
@@ -0,0 +1,302 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._spans_operations import build_get_active_request, build_list_request, build_post_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class SpansOperations:
+    """SpansOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace_async
+    async def post(  # pylint: disable=inconsistent-return-statements
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        body: Optional["_models.RunStatusSpans"] = None,
+        **kwargs: Any
+    ) -> None:
+        """post.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the run the spans belong to.
+        :type run_id: str
+        :param body: The spans to record for the run.
+        :type body: ~azure.mgmt.machinelearningservices.models.RunStatusSpans
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'RunStatusSpans')
+        else:
+            _json = None
+
+        request = build_post_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.post.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:  # 200 is assumed as the success code; the generated empty list would route every response, including success, to the error path
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    post.metadata = {'url': "/history/v1.0/private/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/spans"}  # type: ignore
+
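SpansOperations.post returns None unless a `cls` callback is supplied, so callers simply await it. A sketch, assuming the client exposes this group as `client.spans` and that RunStatusSpans can be constructed with no arguments for an empty payload; both are assumptions, not confirmed by this diff.

    # Inside an async function, with `client` and `models` set up as in the earlier sketches.
    await client.spans.post(
        subscription_id="<subscription-id>",
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        run_id="<run-id>",
        body=models.RunStatusSpans(),  # constructing with no arguments is an assumption
    )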
+
+    @distributed_trace
+    def list(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        continuation_token_parameter: Optional[str] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedSpanDefinition1List"]:
+        """list.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the run whose spans are listed.
+        :type run_id: str
+        :param continuation_token_parameter: The continuation token returned by a previous page of results.
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedSpanDefinition1List or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedSpanDefinition1List]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedSpanDefinition1List"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.list.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedSpanDefinition1List", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': "/history/v1.0/private/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/spans"}  # type: ignore
+
+    @distributed_trace
+    def get_active(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        continuation_token_parameter: Optional[str] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedSpanDefinition1List"]:
+        """get_active.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the run whose active spans are listed.
+        :type run_id: str
+        :param continuation_token_parameter: The continuation token returned by a previous page of results.
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedSpanDefinition1List or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedSpanDefinition1List]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedSpanDefinition1List"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_get_active_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.get_active.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_get_active_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedSpanDefinition1List", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    get_active.metadata = {'url': "/history/v1.0/private/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/spans/active"}  # type: ignore
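list and get_active share the same paging shape as the run query operations: each returns an AsyncItemPaged whose elements come from PaginatedSpanDefinition1List.value, and get_active differs only in targeting the .../spans/active endpoint. A sketch of the list variant, with the same assumed `client.spans` attribute as in the earlier sketches.

    # Inside an async function, with `client` set up as in the earlier sketches.
    async for span in client.spans.list(
        subscription_id="<subscription-id>",
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        run_id="<run-id>",
    ):
        print(span)  # each element is one entry from PaginatedSpanDefinition1List.value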