author     S. Solomon Darnell  2025-03-28 21:52:21 -0500
committer  S. Solomon Darnell  2025-03-28 21:52:21 -0500
commit     4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
tree       ee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory
parent     cc961e04ba734dd72309fb548a2f97d67d578813 (diff)
download   gn-ai-master.tar.gz
two version of R2R are here (HEAD, master)
Diffstat (limited to '.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory')
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/__init__.py  18
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/_azure_machine_learning_workspaces.py  116
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/_configuration.py  64
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/_patch.py  31
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/_vendor.py  27
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/_version.py  9
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/__init__.py  15
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_azure_machine_learning_workspaces.py  110
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_configuration.py  60
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_patch.py  31
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/__init__.py  27
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_delete_operations.py  173
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_events_operations.py  480
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_experiments_operations.py  621
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_metric_operations.py  875
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_run_artifacts_operations.py  1236
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_run_operations.py  168
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_runs_operations.py  2674
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_spans_operations.py  302
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/models/__init__.py  287
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/models/_azure_machine_learning_workspaces_enums.py  80
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/models/_models.py  4329
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/models/_models_py3.py  4854
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/__init__.py  27
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_delete_operations.py  248
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_events_operations.py  713
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_experiments_operations.py  878
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_metric_operations.py  1206
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_run_artifacts_operations.py  1850
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_run_operations.py  233
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_runs_operations.py  3972
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_spans_operations.py  429
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/py.typed  1
33 files changed, 26144 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/__init__.py
new file mode 100644
index 00000000..da466144
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/__init__.py
@@ -0,0 +1,18 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces
+from ._version import VERSION
+
+__version__ = VERSION
+__all__ = ['AzureMachineLearningWorkspaces']
+
+# `._patch.py` is used for handwritten extensions to the generated code
+# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
+from ._patch import patch_sdk
+patch_sdk()
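
For orientation, the package added above is importable from its vendored location; a minimal sketch, assuming the surrounding .venv interpreter is the one in use:

from azure.ai.ml._restclient import runhistory

# __init__.py re-exports the generated client and sets __version__ from _version.py.
print(runhistory.__version__)                      # "0.1.0" per _version.py later in this diff
client_class = runhistory.AzureMachineLearningWorkspaces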
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/_azure_machine_learning_workspaces.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/_azure_machine_learning_workspaces.py
new file mode 100644
index 00000000..7e5916c9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/_azure_machine_learning_workspaces.py
@@ -0,0 +1,116 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from copy import deepcopy
+from typing import TYPE_CHECKING
+
+from msrest import Deserializer, Serializer
+
+from azure.mgmt.core import ARMPipelineClient
+
+from . import models
+from ._configuration import AzureMachineLearningWorkspacesConfiguration
+from .operations import DeleteOperations, EventsOperations, ExperimentsOperations, MetricOperations, RunArtifactsOperations, RunOperations, RunsOperations, SpansOperations
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any
+
+    from azure.core.credentials import TokenCredential
+    from azure.core.rest import HttpRequest, HttpResponse
+
+class AzureMachineLearningWorkspaces(object):    # pylint: disable=too-many-instance-attributes
+    """AzureMachineLearningWorkspaces.
+
+    :ivar delete: DeleteOperations operations
+    :vartype delete: azure.mgmt.machinelearningservices.operations.DeleteOperations
+    :ivar events: EventsOperations operations
+    :vartype events: azure.mgmt.machinelearningservices.operations.EventsOperations
+    :ivar experiments: ExperimentsOperations operations
+    :vartype experiments: azure.mgmt.machinelearningservices.operations.ExperimentsOperations
+    :ivar metric: MetricOperations operations
+    :vartype metric: azure.mgmt.machinelearningservices.operations.MetricOperations
+    :ivar runs: RunsOperations operations
+    :vartype runs: azure.mgmt.machinelearningservices.operations.RunsOperations
+    :ivar run_artifacts: RunArtifactsOperations operations
+    :vartype run_artifacts: azure.mgmt.machinelearningservices.operations.RunArtifactsOperations
+    :ivar run: RunOperations operations
+    :vartype run: azure.mgmt.machinelearningservices.operations.RunOperations
+    :ivar spans: SpansOperations operations
+    :vartype spans: azure.mgmt.machinelearningservices.operations.SpansOperations
+    :param credential: Credential needed for the client to connect to Azure.
+    :type credential: ~azure.core.credentials.TokenCredential
+    :param base_url: Service URL. Default value is ''.
+    :type base_url: str
+    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+     Retry-After header is present.
+    """
+
+    def __init__(
+        self,
+        credential,  # type: "TokenCredential"
+        base_url="",  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        self._config = AzureMachineLearningWorkspacesConfiguration(credential=credential, **kwargs)
+        self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+        self._serialize = Serializer(client_models)
+        self._deserialize = Deserializer(client_models)
+        self._serialize.client_side_validation = False
+        self.delete = DeleteOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.events = EventsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.experiments = ExperimentsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.metric = MetricOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.runs = RunsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.run_artifacts = RunArtifactsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.run = RunOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.spans = SpansOperations(self._client, self._config, self._serialize, self._deserialize)
+
+
+    def _send_request(
+        self,
+        request,  # type: HttpRequest
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> HttpResponse
+        """Runs the network request through the client's chained policies.
+
+        >>> from azure.core.rest import HttpRequest
+        >>> request = HttpRequest("GET", "https://www.example.org/")
+        <HttpRequest [GET], url: 'https://www.example.org/'>
+        >>> response = client._send_request(request)
+        <HttpResponse: 200 OK>
+
+        For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart
+
+        :param request: The network request you want to make. Required.
+        :type request: ~azure.core.rest.HttpRequest
+        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
+        :return: The response of your network call. Does not do error handling on your response.
+        :rtype: ~azure.core.rest.HttpResponse
+        """
+
+        request_copy = deepcopy(request)
+        request_copy.url = self._client.format_url(request_copy.url)
+        return self._client.send_request(request_copy, **kwargs)
+
+    def close(self):
+        # type: () -> None
+        self._client.close()
+
+    def __enter__(self):
+        # type: () -> AzureMachineLearningWorkspaces
+        self._client.__enter__()
+        return self
+
+    def __exit__(self, *exc_details):
+        # type: (Any) -> None
+        self._client.__exit__(*exc_details)
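
A usage sketch for the synchronous client above. DefaultAzureCredential and the endpoint value are assumptions added here for illustration; only the constructor signature and the _send_request behavior come from the code in this diff:

from azure.core.rest import HttpRequest
from azure.identity import DefaultAzureCredential
from azure.ai.ml._restclient.runhistory import AzureMachineLearningWorkspaces

# base_url defaults to "", so callers must supply the run-history service endpoint themselves.
client = AzureMachineLearningWorkspaces(
    credential=DefaultAzureCredential(),
    base_url="https://example-region.api.azureml.ms",  # placeholder endpoint, not from this diff
)

# _send_request pushes a raw HttpRequest through the client's policy pipeline.
request = HttpRequest("GET", "https://www.example.org/")
response = client._send_request(request)
print(response.status_code)

client.close()  # or use the client as a context manager via __enter__/__exit__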
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/_configuration.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/_configuration.py
new file mode 100644
index 00000000..b418413f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/_configuration.py
@@ -0,0 +1,64 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import TYPE_CHECKING
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy
+
+from ._version import VERSION
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any
+
+    from azure.core.credentials import TokenCredential
+
+
+class AzureMachineLearningWorkspacesConfiguration(Configuration):  # pylint: disable=too-many-instance-attributes
+    """Configuration for AzureMachineLearningWorkspaces.
+
+    Note that all parameters used to create this instance are saved as instance
+    attributes.
+
+    :param credential: Credential needed for the client to connect to Azure.
+    :type credential: ~azure.core.credentials.TokenCredential
+    """
+
+    def __init__(
+        self,
+        credential,  # type: "TokenCredential"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs)
+        if credential is None:
+            raise ValueError("Parameter 'credential' must not be None.")
+
+        self.credential = credential
+        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
+        kwargs.setdefault('sdk_moniker', 'mgmt-machinelearningservices/{}'.format(VERSION))
+        self._configure(**kwargs)
+
+    def _configure(
+        self,
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
+        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
+        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
+        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+        self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
+        self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
+        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
+        self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
+        self.authentication_policy = kwargs.get('authentication_policy')
+        if self.credential and not self.authentication_policy:
+            self.authentication_policy = ARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs)
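
Because _configure above checks kwargs before building each default policy, individual policies and the credential scopes can be overridden at client construction. A hedged sketch; the override values are illustrative:

from azure.core.pipeline import policies
from azure.identity import DefaultAzureCredential
from azure.ai.ml._restclient.runhistory import AzureMachineLearningWorkspaces

client = AzureMachineLearningWorkspaces(
    credential=DefaultAzureCredential(),
    base_url="https://example-region.api.azureml.ms",             # placeholder endpoint
    credential_scopes=["https://management.azure.com/.default"],  # popped by the configuration above
    retry_policy=policies.RetryPolicy(retry_total=3),             # used instead of the default RetryPolicy
)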
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/_patch.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/_patch.py
new file mode 100644
index 00000000..74e48ecd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/_patch.py
@@ -0,0 +1,31 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the ""Software""), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+
+# This file is used for handwritten extensions to the generated code. Example:
+# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
+def patch_sdk():
+    pass
\ No newline at end of file
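
patch_sdk is deliberately a no-op; per the linked how-to-patch-sdk-code guide, handwritten customizations live in this module so regeneration does not overwrite them. A purely hypothetical sketch of such a customization (the run_history alias is invented for illustration and is not part of this commit):

def patch_sdk():
    """Hypothetical example of a handwritten extension."""
    from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces

    # Expose the generated `runs` operation group under an extra, friendlier name
    # without editing any generated file.
    AzureMachineLearningWorkspaces.run_history = property(lambda self: self.runs)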
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/_vendor.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/_vendor.py
new file mode 100644
index 00000000..138f663c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/_vendor.py
@@ -0,0 +1,27 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.core.pipeline.transport import HttpRequest
+
+def _convert_request(request, files=None):
+    data = request.content if not files else None
+    request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data)
+    if files:
+        request.set_formdata_body(files)
+    return request
+
+def _format_url_section(template, **kwargs):
+    components = template.split("/")
+    while components:
+        try:
+            return template.format(**kwargs)
+        except KeyError as key:
+            formatted_components = template.split("/")
+            components = [
+                c for c in formatted_components if "{}".format(key.args[0]) not in c
+            ]
+            template = "/".join(components)
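
_format_url_section retries str.format and, on each KeyError, drops the path segments that mention the missing key before retrying, so optional URL sections are removed rather than left unformatted. An illustration with made-up templates:

from azure.ai.ml._restclient.runhistory._vendor import _format_url_section

# All keys supplied: plain formatting.
_format_url_section("/subs/{subId}/runs/{runId}", subId="abc", runId="1")
#   -> "/subs/abc/runs/1"

# Missing key: the "{runId}" segment is filtered out, then formatting is retried.
_format_url_section("/subs/{subId}/runs/{runId}", subId="abc")
#   -> "/subs/abc/runs"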
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/_version.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/_version.py
new file mode 100644
index 00000000..eae7c95b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/_version.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+VERSION = "0.1.0"
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/__init__.py
new file mode 100644
index 00000000..f67ccda9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/__init__.py
@@ -0,0 +1,15 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces
+__all__ = ['AzureMachineLearningWorkspaces']
+
+# `._patch.py` is used for handwritten extensions to the generated code
+# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
+from ._patch import patch_sdk
+patch_sdk()
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_azure_machine_learning_workspaces.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_azure_machine_learning_workspaces.py
new file mode 100644
index 00000000..92b775fb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_azure_machine_learning_workspaces.py
@@ -0,0 +1,110 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from copy import deepcopy
+from typing import Any, Awaitable, TYPE_CHECKING
+
+from msrest import Deserializer, Serializer
+
+from azure.core.rest import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core import AsyncARMPipelineClient
+
+from .. import models
+from ._configuration import AzureMachineLearningWorkspacesConfiguration
+from .operations import DeleteOperations, EventsOperations, ExperimentsOperations, MetricOperations, RunArtifactsOperations, RunOperations, RunsOperations, SpansOperations
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from azure.core.credentials_async import AsyncTokenCredential
+
+class AzureMachineLearningWorkspaces:    # pylint: disable=too-many-instance-attributes
+    """AzureMachineLearningWorkspaces.
+
+    :ivar delete: DeleteOperations operations
+    :vartype delete: azure.mgmt.machinelearningservices.aio.operations.DeleteOperations
+    :ivar events: EventsOperations operations
+    :vartype events: azure.mgmt.machinelearningservices.aio.operations.EventsOperations
+    :ivar experiments: ExperimentsOperations operations
+    :vartype experiments: azure.mgmt.machinelearningservices.aio.operations.ExperimentsOperations
+    :ivar metric: MetricOperations operations
+    :vartype metric: azure.mgmt.machinelearningservices.aio.operations.MetricOperations
+    :ivar runs: RunsOperations operations
+    :vartype runs: azure.mgmt.machinelearningservices.aio.operations.RunsOperations
+    :ivar run_artifacts: RunArtifactsOperations operations
+    :vartype run_artifacts:
+     azure.mgmt.machinelearningservices.aio.operations.RunArtifactsOperations
+    :ivar run: RunOperations operations
+    :vartype run: azure.mgmt.machinelearningservices.aio.operations.RunOperations
+    :ivar spans: SpansOperations operations
+    :vartype spans: azure.mgmt.machinelearningservices.aio.operations.SpansOperations
+    :param credential: Credential needed for the client to connect to Azure.
+    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+    :param base_url: Service URL. Default value is ''.
+    :type base_url: str
+    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+     Retry-After header is present.
+    """
+
+    def __init__(
+        self,
+        credential: "AsyncTokenCredential",
+        base_url: str = "",
+        **kwargs: Any
+    ) -> None:
+        self._config = AzureMachineLearningWorkspacesConfiguration(credential=credential, **kwargs)
+        self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+        self._serialize = Serializer(client_models)
+        self._deserialize = Deserializer(client_models)
+        self._serialize.client_side_validation = False
+        self.delete = DeleteOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.events = EventsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.experiments = ExperimentsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.metric = MetricOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.runs = RunsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.run_artifacts = RunArtifactsOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.run = RunOperations(self._client, self._config, self._serialize, self._deserialize)
+        self.spans = SpansOperations(self._client, self._config, self._serialize, self._deserialize)
+
+
+    def _send_request(
+        self,
+        request: HttpRequest,
+        **kwargs: Any
+    ) -> Awaitable[AsyncHttpResponse]:
+        """Runs the network request through the client's chained policies.
+
+        >>> from azure.core.rest import HttpRequest
+        >>> request = HttpRequest("GET", "https://www.example.org/")
+        <HttpRequest [GET], url: 'https://www.example.org/'>
+        >>> response = await client._send_request(request)
+        <AsyncHttpResponse: 200 OK>
+
+        For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart
+
+        :param request: The network request you want to make. Required.
+        :type request: ~azure.core.rest.HttpRequest
+        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
+        :return: The response of your network call. Does not do error handling on your response.
+        :rtype: ~azure.core.rest.AsyncHttpResponse
+        """
+
+        request_copy = deepcopy(request)
+        request_copy.url = self._client.format_url(request_copy.url)
+        return self._client.send_request(request_copy, **kwargs)
+
+    async def close(self) -> None:
+        await self._client.close()
+
+    async def __aenter__(self) -> "AzureMachineLearningWorkspaces":
+        await self._client.__aenter__()
+        return self
+
+    async def __aexit__(self, *exc_details) -> None:
+        await self._client.__aexit__(*exc_details)
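
An async counterpart of the earlier usage sketch. The aio credential and the endpoint are assumptions; the async-with support and the awaitable _send_request come from the code above:

import asyncio

from azure.core.rest import HttpRequest
from azure.identity.aio import DefaultAzureCredential
from azure.ai.ml._restclient.runhistory.aio import AzureMachineLearningWorkspaces

async def main():
    credential = DefaultAzureCredential()
    async with AzureMachineLearningWorkspaces(
        credential=credential,
        base_url="https://example-region.api.azureml.ms",  # placeholder endpoint
    ) as client:
        request = HttpRequest("GET", "https://www.example.org/")
        response = await client._send_request(request)
        print(response.status_code)
    await credential.close()

asyncio.run(main())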
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_configuration.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_configuration.py
new file mode 100644
index 00000000..6d0b0e4d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_configuration.py
@@ -0,0 +1,60 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, TYPE_CHECKING
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy
+
+from .._version import VERSION
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from azure.core.credentials_async import AsyncTokenCredential
+
+
+class AzureMachineLearningWorkspacesConfiguration(Configuration):  # pylint: disable=too-many-instance-attributes
+    """Configuration for AzureMachineLearningWorkspaces.
+
+    Note that all parameters used to create this instance are saved as instance
+    attributes.
+
+    :param credential: Credential needed for the client to connect to Azure.
+    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+    """
+
+    def __init__(
+        self,
+        credential: "AsyncTokenCredential",
+        **kwargs: Any
+    ) -> None:
+        super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs)
+        if credential is None:
+            raise ValueError("Parameter 'credential' must not be None.")
+
+        self.credential = credential
+        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
+        kwargs.setdefault('sdk_moniker', 'mgmt-machinelearningservices/{}'.format(VERSION))
+        self._configure(**kwargs)
+
+    def _configure(
+        self,
+        **kwargs: Any
+    ) -> None:
+        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
+        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
+        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
+        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+        self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
+        self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
+        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
+        self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
+        self.authentication_policy = kwargs.get('authentication_policy')
+        if self.credential and not self.authentication_policy:
+            self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs)
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_patch.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_patch.py
new file mode 100644
index 00000000..74e48ecd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/_patch.py
@@ -0,0 +1,31 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the ""Software""), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+
+# This file is used for handwritten extensions to the generated code. Example:
+# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
+def patch_sdk():
+    pass
\ No newline at end of file
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/__init__.py
new file mode 100644
index 00000000..3e84a44a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/__init__.py
@@ -0,0 +1,27 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._delete_operations import DeleteOperations
+from ._events_operations import EventsOperations
+from ._experiments_operations import ExperimentsOperations
+from ._metric_operations import MetricOperations
+from ._runs_operations import RunsOperations
+from ._run_artifacts_operations import RunArtifactsOperations
+from ._run_operations import RunOperations
+from ._spans_operations import SpansOperations
+
+__all__ = [
+    'DeleteOperations',
+    'EventsOperations',
+    'ExperimentsOperations',
+    'MetricOperations',
+    'RunsOperations',
+    'RunArtifactsOperations',
+    'RunOperations',
+    'SpansOperations',
+]
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_delete_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_delete_operations.py
new file mode 100644
index 00000000..6841ffdc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_delete_operations.py
@@ -0,0 +1,173 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Optional, TypeVar
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._delete_operations import build_get_configuration_request, build_patch_configuration_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class DeleteOperations:
+    """DeleteOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace_async
+    async def patch_configuration(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.DeleteConfiguration"] = None,
+        **kwargs: Any
+    ) -> "_models.DeleteConfiguration":
+        """patch_configuration.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DeleteConfiguration
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DeleteConfiguration, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DeleteConfiguration
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeleteConfiguration"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DeleteConfiguration')
+        else:
+            _json = None
+
+        request = build_patch_configuration_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.patch_configuration.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DeleteConfiguration', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    patch_configuration.metadata = {'url': "/history/v1.0/private/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/deleteConfiguration"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_configuration(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        **kwargs: Any
+    ) -> "_models.DeleteConfiguration":
+        """get_configuration.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DeleteConfiguration, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DeleteConfiguration
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeleteConfiguration"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_configuration_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            template_url=self.get_configuration.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DeleteConfiguration', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_configuration.metadata = {'url': "/history/v1.0/private/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/deleteConfiguration"}  # type: ignore
+
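Both coroutines above resolve to a DeleteConfiguration model. A hedged sketch of calling them through the async client from the previous example (identifiers are placeholders; run inside a coroutine):

# `client` is the aio AzureMachineLearningWorkspaces instance from the earlier sketch.
config = await client.delete.get_configuration(
    subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder subscription GUID
    resource_group_name="my-resource-group",                  # placeholder
    workspace_name="my-workspace",                            # placeholder
)

# patch_configuration takes an optional DeleteConfiguration body; re-sending the
# fetched model (or None) matches the signature shown above.
updated = await client.delete.patch_configuration(
    subscription_id="00000000-0000-0000-0000-000000000000",
    resource_group_name="my-resource-group",
    workspace_name="my-workspace",
    body=config,
)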
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_events_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_events_operations.py
new file mode 100644
index 00000000..03dc9d42
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_events_operations.py
@@ -0,0 +1,480 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Optional, TypeVar
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._events_operations import build_batch_post_by_experiment_id_request, build_batch_post_by_experiment_name_request, build_batch_post_request, build_post_by_experiment_id_request, build_post_by_experiment_name_request, build_post_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class EventsOperations:
+    """EventsOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace_async
+    async def batch_post_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_name: str,
+        body: Optional["_models.BatchEventCommand"] = None,
+        **kwargs: Any
+    ) -> "_models.BatchEventCommandResult":
+        """batch_post_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchEventCommand
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchEventCommandResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchEventCommandResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchEventCommandResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchEventCommand')
+        else:
+            _json = None
+
+        request = build_batch_post_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_post_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchEventCommandResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_post_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/batch/events"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def batch_post_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_id: str,
+        body: Optional["_models.BatchEventCommand"] = None,
+        **kwargs: Any
+    ) -> "_models.BatchEventCommandResult":
+        """batch_post_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchEventCommand
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchEventCommandResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchEventCommandResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchEventCommandResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchEventCommand')
+        else:
+            _json = None
+
+        request = build_batch_post_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_post_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchEventCommandResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_post_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/batch/events"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def batch_post(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.BatchEventCommand"] = None,
+        **kwargs: Any
+    ) -> "_models.BatchEventCommandResult":
+        """batch_post.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchEventCommand
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchEventCommandResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchEventCommandResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchEventCommandResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchEventCommand')
+        else:
+            _json = None
+
+        request = build_batch_post_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_post.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchEventCommandResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_post.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batch/events"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def post_by_experiment_name(  # pylint: disable=inconsistent-return-statements
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        body: Optional["_models.BaseEvent"] = None,
+        **kwargs: Any
+    ) -> None:
+        """post_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BaseEvent
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BaseEvent')
+        else:
+            _json = None
+
+        request = build_post_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.post_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in []:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    post_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/events"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def post_by_experiment_id(  # pylint: disable=inconsistent-return-statements
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        body: Optional["_models.BaseEvent"] = None,
+        **kwargs: Any
+    ) -> None:
+        """post_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BaseEvent
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BaseEvent')
+        else:
+            _json = None
+
+        request = build_post_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.post_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    post_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/events"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def post(  # pylint: disable=inconsistent-return-statements
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        body: Optional["_models.BaseEvent"] = None,
+        **kwargs: Any
+    ) -> None:
+        """post.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BaseEvent
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BaseEvent')
+        else:
+            _json = None
+
+        request = build_post_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.post.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    post.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/events"}  # type: ignore
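+
+    # Illustrative usage sketch (not part of the generated client). Assuming
+    # the service client exposes this operation group as ``client.events``
+    # (the attribute name is an assumption), a single run event could be
+    # posted roughly like this; ``body`` is optional and defaults to None:
+    #
+    #     event = _models.BaseEvent()  # populate fields as defined in _models
+    #     await client.events.post(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         body=event,
+    #     )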
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_experiments_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_experiments_operations.py
new file mode 100644
index 00000000..e1831c4c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_experiments_operations.py
@@ -0,0 +1,621 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._experiments_operations import build_create_request, build_delete_request_initial, build_delete_tags_request, build_get_by_id_request, build_get_by_query_request, build_get_request, build_update_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class ExperimentsOperations:
+    """ExperimentsOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace_async
+    async def get(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_name: str,
+        **kwargs: Any
+    ) -> "_models.Experiment":
+        """Get details of an Experiment.
+
+        Get details of an Experiment with specific Experiment name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_name: The experiment name.
+        :type experiment_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Experiment, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Experiment
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Experiment"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_name=experiment_name,
+            template_url=self.get.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Experiment', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def create(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_name: str,
+        **kwargs: Any
+    ) -> "_models.Experiment":
+        """Create an Experiment.
+
+        Create a new Experiment.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_name: The experiment name.
+        :type experiment_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Experiment, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Experiment
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Experiment"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_create_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_name=experiment_name,
+            template_url=self.create.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Experiment', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    create.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}"}  # type: ignore
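+
+    # Illustrative usage sketch (not part of the generated client). Assuming
+    # the operation group is reachable as ``client.experiments`` (an assumed
+    # attribute name), creating an experiment and fetching it back by name
+    # could look roughly like this:
+    #
+    #     experiment = await client.experiments.create(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         experiment_name="my-experiment",
+    #     )
+    #     fetched = await client.experiments.get(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         experiment_name="my-experiment",
+    #     )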
+
+
+    @distributed_trace_async
+    async def get_by_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_id: str,
+        **kwargs: Any
+    ) -> "_models.Experiment":
+        """Get details of an Experiment.
+
+        Get details of an Experiment with specific Experiment Id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id: The identifier of the experiment.
+        :type experiment_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Experiment, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Experiment
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Experiment"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_by_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            template_url=self.get_by_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Experiment', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_by_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def update(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_id: str,
+        body: Optional["_models.ModifyExperiment"] = None,
+        **kwargs: Any
+    ) -> "_models.Experiment":
+        """Update details of an Experiment.
+
+        Update details of an Experiment with specific Experiment Id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id: The identifier of the experiment.
+        :type experiment_id: str
+        :param body: Experiment details which needs to be updated.
+        :type body: ~azure.mgmt.machinelearningservices.models.ModifyExperiment
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Experiment, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Experiment
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Experiment"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'ModifyExperiment')
+        else:
+            _json = None
+
+        request = build_update_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.update.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Experiment', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    update.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}"}  # type: ignore
+
+
+    async def _delete_initial(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_id: str,
+        **kwargs: Any
+    ) -> Any:
+        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_delete_request_initial(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            template_url=self._delete_initial.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('object', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    _delete_initial.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def begin_delete(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_id: str,
+        **kwargs: Any
+    ) -> AsyncLROPoller[Any]:
+        """Delete an Experiment.
+
+        Delete an existing Empty Experiment.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id: The identifier of the experiment.
+        :type experiment_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+         this operation to not poll, or pass in your own initialized polling object for a personal
+         polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either any or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[any]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            raw_result = await self._delete_initial(
+                subscription_id=subscription_id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                experiment_id=experiment_id,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+        kwargs.pop('error_map', None)
+
+        def get_long_running_output(pipeline_response):
+            response = pipeline_response.http_response
+            deserialized = self._deserialize('object', pipeline_response)
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+
+    begin_delete.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}"}  # type: ignore
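+
+    # Illustrative usage sketch (not part of the generated client).
+    # ``begin_delete`` follows the azure-core long-running-operation pattern:
+    # it returns an AsyncLROPoller, and ``result()`` awaits completion.
+    # Assuming the group is exposed as ``client.experiments``:
+    #
+    #     poller = await client.experiments.begin_delete(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         experiment_id="<experiment-id>",
+    #     )
+    #     outcome = await poller.result()
+    #
+    # Passing ``polling=False`` disables polling, and a previously saved
+    # ``continuation_token`` can rehydrate a poller, as handled above.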
+
+    @distributed_trace
+    def get_by_query(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        url_safe_experiment_names_only: Optional[bool] = True,
+        body: Optional["_models.ExperimentQueryParams"] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedExperimentList"]:
+        """Get all Experiments in a specific workspace.
+
+        Get all experiments in a specific workspace with the specified query filters.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param url_safe_experiment_names_only:
+        :type url_safe_experiment_names_only: bool
+        :param body: Query parameters for data sorting and filtering.
+        :type body: ~azure.mgmt.machinelearningservices.models.ExperimentQueryParams
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedExperimentList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedExperimentList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedExperimentList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                if body is not None:
+                    _json = self._serialize.body(body, 'ExperimentQueryParams')
+                else:
+                    _json = None
+                
+                request = build_get_by_query_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    content_type=content_type,
+                    json=_json,
+                    url_safe_experiment_names_only=url_safe_experiment_names_only,
+                    template_url=self.get_by_query.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                if body is not None:
+                    _json = self._serialize.body(body, 'ExperimentQueryParams')
+                else:
+                    _json = None
+                
+                request = build_get_by_query_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    content_type=content_type,
+                    json=_json,
+                    url_safe_experiment_names_only=url_safe_experiment_names_only,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedExperimentList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    get_by_query.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments:query"}  # type: ignore
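+
+    # Illustrative usage sketch (not part of the generated client).
+    # ``get_by_query`` returns an AsyncItemPaged; iterating it yields the
+    # elements unpacked from each PaginatedExperimentList page (see
+    # ``extract_data`` above). Assuming the group is exposed as
+    # ``client.experiments``:
+    #
+    #     async for experiment in client.experiments.get_by_query(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #     ):
+    #         print(experiment)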
+
+    @distributed_trace_async
+    async def delete_tags(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_id: str,
+        body: Optional["_models.DeleteTagsCommand"] = None,
+        **kwargs: Any
+    ) -> "_models.DeleteExperimentTagsResult":
+        """Delete list of Tags in an Experiment.
+
+        Delete list of Tags from a specific Experiment Id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id: The identifier of the experiment.
+        :type experiment_id: str
+        :param body: The requested tags list to be deleted.
+        :type body: ~azure.mgmt.machinelearningservices.models.DeleteTagsCommand
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DeleteExperimentTagsResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DeleteExperimentTagsResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeleteExperimentTagsResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DeleteTagsCommand')
+        else:
+            _json = None
+
+        request = build_delete_tags_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.delete_tags.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DeleteExperimentTagsResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    delete_tags.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/tags:delete"}  # type: ignore
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_metric_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_metric_operations.py
new file mode 100644
index 00000000..0fed06c3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_metric_operations.py
@@ -0,0 +1,875 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._metric_operations import build_delete_metrics_by_data_container_id_request_initial, build_delete_metrics_by_run_id_request_initial, build_get_full_fidelity_metric_request, build_get_metric_details_by_experiment_id_request, build_get_metric_details_by_experiment_name_request, build_get_sampled_metric_request, build_list_generic_resource_metrics_request, build_list_metric_request, build_post_run_metrics_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class MetricOperations:
+    """MetricOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace_async
+    async def get_full_fidelity_metric(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        body: Optional["_models.RetrieveFullFidelityMetricRequest"] = None,
+        **kwargs: Any
+    ) -> "_models.MetricV2":
+        """API to retrieve the full-fidelity sequence associated with a particular run and metricName.
+
+        API to retrieve the full-fidelity sequence associated with a particular run and metricName.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.RetrieveFullFidelityMetricRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: MetricV2, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.MetricV2
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.MetricV2"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'RetrieveFullFidelityMetricRequest')
+        else:
+            _json = None
+
+        request = build_get_full_fidelity_metric_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.get_full_fidelity_metric.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('MetricV2', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_full_fidelity_metric.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/full"}  # type: ignore
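+
+    # Illustrative usage sketch (not part of the generated client). Assuming
+    # the operation group is exposed as ``client.metric`` (the attribute name
+    # is an assumption), the full-fidelity sequence for a run's metric could
+    # be fetched roughly like this; the request model's fields (for example,
+    # which metric name to select) are defined in _models and not shown here:
+    #
+    #     request_body = _models.RetrieveFullFidelityMetricRequest()
+    #     metric = await client.metric.get_full_fidelity_metric(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         body=request_body,
+    #     )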
+
+
+    @distributed_trace
+    def list_metric(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        body: Optional["_models.ListMetrics"] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedMetricDefinitionList"]:
+        """API to list metric for a particular datacontainer and metricName.
+
+        API to list metric for a particular datacontainer and metricName.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.ListMetrics
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedMetricDefinitionList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedMetricDefinitionList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedMetricDefinitionList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                if body is not None:
+                    _json = self._serialize.body(body, 'ListMetrics')
+                else:
+                    _json = None
+                
+                request = build_list_metric_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=self.list_metric.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                if body is not None:
+                    _json = self._serialize.body(body, 'ListMetrics')
+                else:
+                    _json = None
+                
+                request = build_list_metric_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedMetricDefinitionList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_metric.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/list"}  # type: ignore
+
+    @distributed_trace
+    def list_generic_resource_metrics(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.ListGenericResourceMetrics"] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedMetricDefinitionList"]:
+        """API to list workspace/subworkspace resource metrics for a particular ResourceId.
+
+        API to list workspace/subworkspace resource metrics for a particular ResourceId.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.ListGenericResourceMetrics
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedMetricDefinitionList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedMetricDefinitionList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedMetricDefinitionList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                if body is not None:
+                    _json = self._serialize.body(body, 'ListGenericResourceMetrics')
+                else:
+                    _json = None
+                
+                request = build_list_generic_resource_metrics_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=self.list_generic_resource_metrics.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                if body is not None:
+                    _json = self._serialize.body(body, 'ListGenericResourceMetrics')
+                else:
+                    _json = None
+                
+                request = build_list_generic_resource_metrics_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedMetricDefinitionList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_generic_resource_metrics.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/azuremonitor/list"}  # type: ignore
+
+    @distributed_trace_async
+    async def get_sampled_metric(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        body: Optional["_models.GetSampledMetricRequest"] = None,
+        **kwargs: Any
+    ) -> "_models.MetricSample":
+        """Stub for future action
+        API to retrieve samples for one or many runs to compare a given metricName
+        Throw if schemas don't match across metrics.
+
+        Stub for future action
+        API to retrieve samples for one or many runs to compare a given metricName
+        Throw if schemas don't match across metrics.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.GetSampledMetricRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: MetricSample, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.MetricSample
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.MetricSample"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'GetSampledMetricRequest')
+        else:
+            _json = None
+
+        request = build_get_sampled_metric_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.get_sampled_metric.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('MetricSample', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_sampled_metric.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/sample"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def post_run_metrics(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        body: Optional["_models.BatchIMetricV2"] = None,
+        **kwargs: Any
+    ) -> "_models.PostRunMetricsResult":
+        """Post Metrics to a Run.
+
+        Post Metrics to a specific Run Id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchIMetricV2
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: PostRunMetricsResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.PostRunMetricsResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PostRunMetricsResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchIMetricV2')
+        else:
+            _json = None
+
+        request = build_post_run_metrics_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.post_run_metrics.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 207]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if response.status_code == 200:
+            deserialized = self._deserialize('PostRunMetricsResult', pipeline_response)
+
+        if response.status_code == 207:
+            deserialized = self._deserialize('PostRunMetricsResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    post_run_metrics.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/batch"}  # type: ignore
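+
+    # Hedged usage sketch (not part of the generated client): posting a batch of metrics
+    # to a Run through this operation group. `metric_ops` stands for an already-constructed
+    # instance of this operations class, and the empty BatchIMetricV2 is illustrative;
+    # consult the model definition for the fields to populate.
+    #
+    #     async def example_post_metrics(metric_ops, sub_id, rg, ws, run_id):
+    #         batch = _models.BatchIMetricV2()  # populate metric values as needed
+    #         result = await metric_ops.post_run_metrics(
+    #             sub_id, rg, ws, run_id, body=batch,
+    #         )
+    #         return result  # PostRunMetricsResult on success (HTTP 200/207)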
+
+
+    async def _delete_metrics_by_data_container_id_initial(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_id: str,
+        data_container_id: str,
+        **kwargs: Any
+    ) -> Any:
+        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_delete_metrics_by_data_container_id_request_initial(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            data_container_id=data_container_id,
+            template_url=self._delete_metrics_by_data_container_id_initial.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('object', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    _delete_metrics_by_data_container_id_initial.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentId}/containers/{dataContainerId}/deleteMetrics"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def begin_delete_metrics_by_data_container_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_id: str,
+        data_container_id: str,
+        **kwargs: Any
+    ) -> AsyncLROPoller[Any]:
+        """API to delete metrics by data container id.
+
+        API to delete metrics by data container id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id: The identifier of the Experiment.
+        :type experiment_id: str
+        :param data_container_id: The identifier of the data container whose metrics are deleted.
+        :type data_container_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+         this operation to not poll, or pass in your own initialized polling object for a personal
+         polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either any or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[any]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            raw_result = await self._delete_metrics_by_data_container_id_initial(
+                subscription_id=subscription_id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                experiment_id=experiment_id,
+                data_container_id=data_container_id,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+        kwargs.pop('error_map', None)
+
+        def get_long_running_output(pipeline_response):
+            response = pipeline_response.http_response
+            deserialized = self._deserialize('object', pipeline_response)
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+
+    begin_delete_metrics_by_data_container_id.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentId}/containers/{dataContainerId}/deleteMetrics"}  # type: ignore
+
+    async def _delete_metrics_by_run_id_initial(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        **kwargs: Any
+    ) -> Any:
+        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_delete_metrics_by_run_id_request_initial(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            template_url=self._delete_metrics_by_run_id_initial.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('object', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    _delete_metrics_by_run_id_initial.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/deleteMetrics"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def begin_delete_metrics_by_run_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        **kwargs: Any
+    ) -> AsyncLROPoller[Any]:
+        """API to delete metrics by run id.
+
+        API to delete metrics by run id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the Run whose metrics are deleted.
+        :type run_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+         this operation to not poll, or pass in your own initialized polling object for a personal
+         polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either any or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[any]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            raw_result = await self._delete_metrics_by_run_id_initial(
+                subscription_id=subscription_id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                run_id=run_id,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+        kwargs.pop('error_map', None)
+
+        def get_long_running_output(pipeline_response):
+            response = pipeline_response.http_response
+            deserialized = self._deserialize('object', pipeline_response)
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+
+        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+
+    begin_delete_metrics_by_run_id.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/deleteMetrics"}  # type: ignore
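+
+    # Hedged usage sketch (not part of the generated client): the begin_delete_metrics_*
+    # operations above return an AsyncLROPoller, so callers await the call to start the
+    # operation and then await poller.result() to wait for completion. `metric_ops` is an
+    # assumed, already-constructed instance of this operations class.
+    #
+    #     async def example_delete_run_metrics(metric_ops, sub_id, rg, ws, run_id):
+    #         poller = await metric_ops.begin_delete_metrics_by_run_id(
+    #             sub_id, rg, ws, run_id,
+    #         )
+    #         return await poller.result()  # blocks until the long-running delete finishes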
+
+    @distributed_trace_async
+    async def get_metric_details_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        metric_id: str,
+        experiment_name: str,
+        **kwargs: Any
+    ) -> "_models.RunMetric":
+        """Get Metric details.
+
+        Get Metric details for a specific Metric Id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param metric_id: The identifier for a Metric.
+        :type metric_id: str
+        :param experiment_name: The name of the Experiment.
+        :type experiment_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: RunMetric, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.RunMetric
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RunMetric"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_metric_details_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            metric_id=metric_id,
+            experiment_name=experiment_name,
+            template_url=self.get_metric_details_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('RunMetric', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_metric_details_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/metrics/{metricId}"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_metric_details_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        metric_id: str,
+        experiment_id: str,
+        **kwargs: Any
+    ) -> "_models.RunMetric":
+        """Get Metric details.
+
+        Get Metric details for a specific Metric Id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param metric_id: The identifier for a Metric.
+        :type metric_id: str
+        :param experiment_id: The identifier of the Experiment.
+        :type experiment_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: RunMetric, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.RunMetric
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RunMetric"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_metric_details_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            metric_id=metric_id,
+            experiment_id=experiment_id,
+            template_url=self.get_metric_details_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('RunMetric', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_metric_details_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/metrics/{metricId}"}  # type: ignore
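+
+    # Hedged usage sketch (not part of the generated client): fetching the details of a
+    # single metric by its id. `metric_ops` is an assumed, already-constructed instance of
+    # this operations class; the experiment-name variant is called the same way with
+    # experiment_name in place of experiment_id.
+    #
+    #     async def example_get_metric(metric_ops, sub_id, rg, ws, metric_id, experiment_id):
+    #         run_metric = await metric_ops.get_metric_details_by_experiment_id(
+    #             sub_id, rg, ws, metric_id, experiment_id,
+    #         )
+    #         return run_metric  # RunMetric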
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_run_artifacts_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_run_artifacts_operations.py
new file mode 100644
index 00000000..3710e381
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_run_artifacts_operations.py
@@ -0,0 +1,1236 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._run_artifacts_operations import build_batch_create_empty_artifacts_by_experiment_id_request, build_batch_create_empty_artifacts_by_experiment_name_request, build_get_by_id_by_experiment_id_request, build_get_by_id_by_experiment_name_request, build_get_content_information_by_experiment_id_request, build_get_content_information_by_experiment_name_request, build_get_sas_uri_by_experiment_id_request, build_get_sas_uri_by_experiment_name_request, build_list_in_container_by_experiment_id_request, build_list_in_container_by_experiment_name_request, build_list_in_path_by_experiment_id_request, build_list_in_path_by_experiment_name_request, build_list_sas_by_prefix_by_experiment_id_request, build_list_sas_by_prefix_by_experiment_name_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class RunArtifactsOperations:
+    """RunArtifactsOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace
+    def list_in_container_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        continuation_token_parameter: Optional[str] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedArtifactList"]:
+        """list_in_container_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the Run.
+        :type run_id: str
+        :param experiment_name: The name of the Experiment.
+        :type experiment_name: str
+        :param continuation_token_parameter: The continuation token from a previous page of results.
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedArtifactList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedArtifactList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedArtifactList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_in_container_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.list_in_container_by_experiment_name.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_in_container_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedArtifactList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_in_container_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts"}  # type: ignore
+
+    @distributed_trace
+    def list_in_container_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        continuation_token_parameter: Optional[str] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedArtifactList"]:
+        """list_in_container_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the Run.
+        :type run_id: str
+        :param experiment_id: The identifier of the Experiment.
+        :type experiment_id: str
+        :param continuation_token_parameter: The continuation token from a previous page of results.
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedArtifactList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedArtifactList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedArtifactList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_in_container_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.list_in_container_by_experiment_id.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_in_container_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedArtifactList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_in_container_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts"}  # type: ignore
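+
+    # Hedged usage sketch (not part of the generated client): both list_in_container_*
+    # methods return an AsyncItemPaged, so the call itself is not awaited and the items
+    # are consumed with `async for`. `artifact_ops` is an assumed, already-constructed
+    # instance of this operations class.
+    #
+    #     async def example_list_artifacts(artifact_ops, sub_id, rg, ws, run_id, experiment_id):
+    #         artifacts = []
+    #         async for artifact in artifact_ops.list_in_container_by_experiment_id(
+    #             sub_id, rg, ws, run_id, experiment_id,
+    #         ):
+    #             artifacts.append(artifact)
+    #         return artifacts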
+
+    @distributed_trace
+    def list_in_path_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        path: Optional[str] = None,
+        continuation_token_parameter: Optional[str] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedArtifactList"]:
+        """list_in_path_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the Run.
+        :type run_id: str
+        :param experiment_name: The name of the Experiment.
+        :type experiment_name: str
+        :param path: The path under which to list artifacts.
+        :type path: str
+        :param continuation_token_parameter: The continuation token from a previous page of results.
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedArtifactList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedArtifactList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedArtifactList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_in_path_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.list_in_path_by_experiment_name.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_in_path_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedArtifactList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_in_path_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/path"}  # type: ignore
+
+    @distributed_trace
+    def list_in_path_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        path: Optional[str] = None,
+        continuation_token_parameter: Optional[str] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedArtifactList"]:
+        """list_in_path_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the Run.
+        :type run_id: str
+        :param experiment_id: The identifier of the Experiment.
+        :type experiment_id: str
+        :param path: The path under which to list artifacts.
+        :type path: str
+        :param continuation_token_parameter: The continuation token from a previous page of results.
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedArtifactList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedArtifactList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedArtifactList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_in_path_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.list_in_path_by_experiment_id.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_in_path_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedArtifactList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_in_path_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/path"}  # type: ignore
+
+    @distributed_trace_async
+    async def get_by_id_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        path: Optional[str] = None,
+        **kwargs: Any
+    ) -> "_models.Artifact":
+        """get_by_id_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the Run.
+        :type run_id: str
+        :param experiment_name: The name of the Experiment.
+        :type experiment_name: str
+        :param path: The path of the artifact.
+        :type path: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Artifact, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Artifact
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Artifact"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_by_id_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            path=path,
+            template_url=self.get_by_id_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Artifact', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_by_id_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/metadata"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_by_id_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        path: Optional[str] = None,
+        **kwargs: Any
+    ) -> "_models.Artifact":
+        """get_by_id_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the Run.
+        :type run_id: str
+        :param experiment_id: The identifier of the Experiment.
+        :type experiment_id: str
+        :param path: The path of the artifact.
+        :type path: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Artifact, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Artifact
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Artifact"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_by_id_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            path=path,
+            template_url=self.get_by_id_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Artifact', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_by_id_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/metadata"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_content_information_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        path: Optional[str] = None,
+        **kwargs: Any
+    ) -> "_models.ArtifactContentInformation":
+        """get_content_information_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the Run.
+        :type run_id: str
+        :param experiment_name: The name of the Experiment.
+        :type experiment_name: str
+        :param path: The path of the artifact.
+        :type path: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ArtifactContentInformation, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.ArtifactContentInformation
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ArtifactContentInformation"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_content_information_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            path=path,
+            template_url=self.get_content_information_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ArtifactContentInformation', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_content_information_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/contentinfo"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_content_information_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        path: Optional[str] = None,
+        **kwargs: Any
+    ) -> "_models.ArtifactContentInformation":
+        """get_content_information_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the Run.
+        :type run_id: str
+        :param experiment_id: The identifier of the Experiment.
+        :type experiment_id: str
+        :param path: The path of the artifact.
+        :type path: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ArtifactContentInformation, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.ArtifactContentInformation
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ArtifactContentInformation"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_content_information_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            path=path,
+            template_url=self.get_content_information_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ArtifactContentInformation', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_content_information_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/contentinfo"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_sas_uri_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        path: Optional[str] = None,
+        **kwargs: Any
+    ) -> str:
+        """get_sas_uri_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the Run.
+        :type run_id: str
+        :param experiment_name: The name of the Experiment.
+        :type experiment_name: str
+        :param path: The path of the artifact.
+        :type path: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: str, or the result of cls(response)
+        :rtype: str
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[str]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_sas_uri_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            path=path,
+            template_url=self.get_sas_uri_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('str', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_sas_uri_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/artifacturi"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_sas_uri_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        path: Optional[str] = None,
+        **kwargs: Any
+    ) -> str:
+        """get_sas_uri_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the Run.
+        :type run_id: str
+        :param experiment_id: The identifier of the Experiment.
+        :type experiment_id: str
+        :param path: The path of the artifact.
+        :type path: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: str, or the result of cls(response)
+        :rtype: str
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[str]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_sas_uri_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            path=path,
+            template_url=self.get_sas_uri_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('str', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_sas_uri_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/artifacturi"}  # type: ignore
+
+
+    @distributed_trace
+    def list_sas_by_prefix_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        path: Optional[str] = None,
+        continuation_token_parameter: Optional[str] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedArtifactContentInformationList"]:
+        """list_sas_by_prefix_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param path:
+        :type path: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either PaginatedArtifactContentInformationList or the
+         result of cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedArtifactContentInformationList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedArtifactContentInformationList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_sas_by_prefix_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.list_sas_by_prefix_by_experiment_name.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_sas_by_prefix_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedArtifactContentInformationList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_sas_by_prefix_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/prefix/contentinfo"}  # type: ignore
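Unlike the single-URI operations above, list_sas_by_prefix_by_experiment_name is a regular (non-async) method that returns an AsyncItemPaged, so it is called without await and consumed with async for; the returned pager follows the next link automatically through its get_next/extract_data helpers. A minimal consumption sketch, under the same client assumptions as the previous example:

    async def list_artifact_sas_uris(client):
        # No await on the call itself: it returns an AsyncItemPaged immediately.
        pager = client.run_artifacts.list_sas_by_prefix_by_experiment_name(
            subscription_id="<subscription-id>",
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
            run_id="<run-id>",
            experiment_name="<experiment>",
            path="outputs/",  # optional prefix to list under
        )
        async for content_info in pager:  # pages are fetched lazily as needed
            print(content_info)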
+
+    @distributed_trace
+    def list_sas_by_prefix_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        path: Optional[str] = None,
+        continuation_token_parameter: Optional[str] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedArtifactContentInformationList"]:
+        """list_sas_by_prefix_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param path:
+        :type path: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either PaginatedArtifactContentInformationList or the
+         result of cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedArtifactContentInformationList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedArtifactContentInformationList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_sas_by_prefix_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.list_sas_by_prefix_by_experiment_id.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_sas_by_prefix_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedArtifactContentInformationList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_sas_by_prefix_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/prefix/contentinfo"}  # type: ignore
+
+    @distributed_trace_async
+    async def batch_create_empty_artifacts_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        body: Optional["_models.ArtifactPathList"] = None,
+        **kwargs: Any
+    ) -> "_models.BatchArtifactContentInformationResult":
+        """batch_create_empty_artifacts_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.ArtifactPathList
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchArtifactContentInformationResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchArtifactContentInformationResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchArtifactContentInformationResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'ArtifactPathList')
+        else:
+            _json = None
+
+        request = build_batch_create_empty_artifacts_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_create_empty_artifacts_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchArtifactContentInformationResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_create_empty_artifacts_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/batch/metadata"}  # type: ignore
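batch_create_empty_artifacts_by_experiment_name takes an optional ArtifactPathList body, serializes it as JSON, and returns a BatchArtifactContentInformationResult. A sketch of the call, assuming the same client as above and a pre-built ArtifactPathList model instance (its construction is omitted; the model's fields are not shown in this part of the diff):

    async def register_empty_artifacts(client, artifact_paths):
        # `artifact_paths` stands for an ArtifactPathList model instance; passing
        # None is allowed and sends an empty request body.
        return await client.run_artifacts.batch_create_empty_artifacts_by_experiment_name(
            subscription_id="<subscription-id>",
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
            run_id="<run-id>",
            experiment_name="<experiment>",
            body=artifact_paths,
        )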
+
+
+    @distributed_trace_async
+    async def batch_create_empty_artifacts_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        body: Optional["_models.ArtifactPathList"] = None,
+        **kwargs: Any
+    ) -> "_models.BatchArtifactContentInformationResult":
+        """batch_create_empty_artifacts_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.ArtifactPathList
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchArtifactContentInformationResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchArtifactContentInformationResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchArtifactContentInformationResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'ArtifactPathList')
+        else:
+            _json = None
+
+        request = build_batch_create_empty_artifacts_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_create_empty_artifacts_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchArtifactContentInformationResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_create_empty_artifacts_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/batch/metadata"}  # type: ignore
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_run_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_run_operations.py
new file mode 100644
index 00000000..64cbc7dd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_run_operations.py
@@ -0,0 +1,168 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, List, Optional, TypeVar, Union
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._run_operations import build_list_by_compute_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class RunOperations:
+    """RunOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace
+    def list_by_compute(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        compute_name: str,
+        filter: Optional[str] = None,
+        continuationtoken: Optional[str] = None,
+        orderby: Optional[List[str]] = None,
+        sortorder: Optional[Union[str, "_models.SortOrderDirection"]] = None,
+        top: Optional[int] = None,
+        count: Optional[bool] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedRunList"]:
+        """list_by_compute.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param compute_name:
+        :type compute_name: str
+        :param filter: Allows for filtering the collection of resources.
+         The expression specified is evaluated for each resource in the collection, and only items
+         where the expression evaluates to true are included in the response.
+        :type filter: str
+        :param continuationtoken: The continuation token to use for getting the next set of resources.
+        :type continuationtoken: str
+        :param orderby: The list of resource properties to use for sorting the requested resources.
+        :type orderby: list[str]
+        :param sortorder: The sort order of the returned resources. Not used; instead, specify asc or
+         desc after each property name in the OrderBy parameter.
+        :type sortorder: str or ~azure.mgmt.machinelearningservices.models.SortOrderDirection
+        :param top: The maximum number of items in the resource collection to be included in the
+         result.
+         If not specified, all items are returned.
+        :type top: int
+        :param count: Whether to include a count of the matching resources along with the resources
+         returned in the response.
+        :type count: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either PaginatedRunList or the result of cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedRunList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedRunList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_by_compute_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    compute_name=compute_name,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=self.list_by_compute.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_by_compute_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    compute_name=compute_name,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedRunList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_by_compute.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/runs"}  # type: ignore
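list_by_compute pages through all runs that executed on a given compute target, with optional filter, ordering, top, and count query parameters. A minimal sketch, assuming the async client exposes this operation group as `run` (attribute name assumed):

    async def list_runs_on_compute(client, compute_name: str):
        pager = client.run.list_by_compute(
            subscription_id="<subscription-id>",
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
            compute_name=compute_name,
            top=50,      # cap the number of items returned
            count=True,  # include a count of matching resources
        )
        async for run in pager:
            print(run)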
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_runs_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_runs_operations.py
new file mode 100644
index 00000000..b42721dc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_runs_operations.py
@@ -0,0 +1,2674 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, List, Optional, TypeVar, Union
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._runs_operations import build_add_or_modify_by_experiment_id_request, build_add_or_modify_by_experiment_name_request, build_add_or_modify_experiment_request, build_add_or_modify_run_service_instances_request, build_add_request, build_batch_add_or_modify_by_experiment_id_request, build_batch_add_or_modify_by_experiment_name_request, build_batch_get_run_data_request, build_cancel_run_with_uri_by_experiment_id_request, build_cancel_run_with_uri_by_experiment_name_request, build_delete_run_services_by_experiment_id_request, build_delete_run_services_by_experiment_name_request, build_delete_run_services_request, build_delete_tags_by_experiment_id_request, build_delete_tags_by_experiment_name_request, build_delete_tags_request, build_get_by_experiment_id_request, build_get_by_experiment_name_request, build_get_by_ids_by_experiment_id_request, build_get_by_ids_by_experiment_name_request, build_get_by_query_by_experiment_id_request, build_get_by_query_by_experiment_name_request, build_get_child_by_experiment_id_request, build_get_child_by_experiment_name_request, build_get_child_request, build_get_details_by_experiment_id_request, build_get_details_by_experiment_name_request, build_get_details_request, build_get_request, build_get_run_data_request, build_get_run_service_instances_request, build_modify_or_delete_tags_by_experiment_id_request, build_modify_or_delete_tags_by_experiment_name_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class RunsOperations:  # pylint: disable=too-many-public-methods
+    """RunsOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace
+    def get_child_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        filter: Optional[str] = None,
+        continuationtoken: Optional[str] = None,
+        orderby: Optional[List[str]] = None,
+        sortorder: Optional[Union[str, "_models.SortOrderDirection"]] = None,
+        top: Optional[int] = None,
+        count: Optional[bool] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedRunList"]:
+        """get_child_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param filter: Allows for filtering the collection of resources.
+         The expression specified is evaluated for each resource in the collection, and only items
+         where the expression evaluates to true are included in the response.
+        :type filter: str
+        :param continuationtoken: The continuation token to use for getting the next set of resources.
+        :type continuationtoken: str
+        :param orderby: The list of resource properties to use for sorting the requested resources.
+        :type orderby: list[str]
+        :param sortorder: The sort order of the returned resources. Not used; instead, specify asc or
+         desc after each property name in the OrderBy parameter.
+        :type sortorder: str or ~azure.mgmt.machinelearningservices.models.SortOrderDirection
+        :param top: The maximum number of items in the resource collection to be included in the
+         result.
+         If not specified, all items are returned.
+        :type top: int
+        :param count: Whether to include a count of the matching resources along with the resources
+         returned in the response.
+        :type count: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either PaginatedRunList or the result of cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedRunList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedRunList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_get_child_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=self.get_child_by_experiment_name.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_get_child_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedRunList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    get_child_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/children"}  # type: ignore
+
+    @distributed_trace
+    def get_child_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        filter: Optional[str] = None,
+        continuationtoken: Optional[str] = None,
+        orderby: Optional[List[str]] = None,
+        sortorder: Optional[Union[str, "_models.SortOrderDirection"]] = None,
+        top: Optional[int] = None,
+        count: Optional[bool] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedRunList"]:
+        """get_child_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param filter: Allows for filtering the collection of resources.
+         The expression specified is evaluated for each resource in the collection, and only items
+         where the expression evaluates to true are included in the response.
+        :type filter: str
+        :param continuationtoken: The continuation token to use for getting the next set of resources.
+        :type continuationtoken: str
+        :param orderby: The list of resource properties to use for sorting the requested resources.
+        :type orderby: list[str]
+        :param sortorder: The sort order of the returned resources. Not used; instead, specify asc or
+         desc after each property name in the OrderBy parameter.
+        :type sortorder: str or ~azure.mgmt.machinelearningservices.models.SortOrderDirection
+        :param top: The maximum number of items in the resource collection to be included in the
+         result.
+         If not specified, all items are returned.
+        :type top: int
+        :param count: Whether to include a count of the matching resources along with the resources
+         returned in the response.
+        :type count: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either PaginatedRunList or the result of cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedRunList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedRunList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_get_child_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=self.get_child_by_experiment_id.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_get_child_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedRunList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    get_child_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/children"}  # type: ignore
+
+    @distributed_trace
+    def get_child(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        filter: Optional[str] = None,
+        continuationtoken: Optional[str] = None,
+        orderby: Optional[List[str]] = None,
+        sortorder: Optional[Union[str, "_models.SortOrderDirection"]] = None,
+        top: Optional[int] = None,
+        count: Optional[bool] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedRunList"]:
+        """get_child.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param filter: Allows for filtering the collection of resources.
+         The expression specified is evaluated for each resource in the collection, and only items
+         where the expression evaluates to true are included in the response.
+        :type filter: str
+        :param continuationtoken: The continuation token to use for getting the next set of resources.
+        :type continuationtoken: str
+        :param orderby: The list of resource properties to use for sorting the requested resources.
+        :type orderby: list[str]
+        :param sortorder: The sort order of the returned resources. Not used; instead, specify asc or
+         desc after each property name in the OrderBy parameter.
+        :type sortorder: str or ~azure.mgmt.machinelearningservices.models.SortOrderDirection
+        :param top: The maximum number of items in the resource collection to be included in the
+         result.
+         If not specified, all items are returned.
+        :type top: int
+        :param count: Whether to include a count of the matching resources along with the resources
+         returned in the response.
+        :type count: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either PaginatedRunList or the result of cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedRunList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedRunList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_get_child_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=self.get_child.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_get_child_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedRunList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    get_child.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/children"}  # type: ignore
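The three get_child variants above differ only in how the parent run is scoped (by experiment name, by experiment id, or by run id alone); all return the same PaginatedRunList pager of child runs. A sketch of the run-scoped variant, assuming the same client as before with `runs` as the assumed attribute for this operation group:

    async def list_child_runs(client, parent_run_id: str):
        pager = client.runs.get_child(
            subscription_id="<subscription-id>",
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
            run_id=parent_run_id,
        )
        async for child_run in pager:
            print(child_run)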
+
+    @distributed_trace_async
+    async def get_details_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        **kwargs: Any
+    ) -> "_models.RunDetails":
+        """get_details_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: RunDetails, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.RunDetails
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RunDetails"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_details_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            template_url=self.get_details_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('RunDetails', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_details_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/details"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_details_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        **kwargs: Any
+    ) -> "_models.RunDetails":
+        """get_details_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: RunDetails, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.RunDetails
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RunDetails"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_details_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            template_url=self.get_details_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('RunDetails', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_details_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/details"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_details(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        **kwargs: Any
+    ) -> "_models.RunDetails":
+        """get_details.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: RunDetails, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.RunDetails
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RunDetails"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_details_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            template_url=self.get_details.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('RunDetails', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_details.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/details"}  # type: ignore
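+
+    # Usage sketch (editor's note): a minimal, hedged example of calling get_details
+    # through the async client. The attribute name ``client.runs`` and the client
+    # construction are assumptions, not taken from this file; the parameters match
+    # the signature above.
+    #
+    #     details = await client.runs.get_details(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #     )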
+
+
+    @distributed_trace_async
+    async def get_run_data(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.GetRunDataRequest"] = None,
+        **kwargs: Any
+    ) -> "_models.GetRunDataResult":
+        """get_run_data.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.GetRunDataRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: GetRunDataResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.GetRunDataResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.GetRunDataResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'GetRunDataRequest')
+        else:
+            _json = None
+
+        request = build_get_run_data_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.get_run_data.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('GetRunDataResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_run_data.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/rundata"}  # type: ignore
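+
+    # Usage sketch (editor's note): get_run_data accepts an optional GetRunDataRequest
+    # body; passing body=None skips JSON serialization, as the branch above shows. The
+    # GetRunDataRequest fields are not defined in this file, so the body is omitted
+    # here rather than guessed. ``client.runs`` is the same assumed attribute as above.
+    #
+    #     result = await client.runs.get_run_data(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         body=None,
+    #     )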
+
+
+    @distributed_trace_async
+    async def batch_get_run_data(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        body: Optional["_models.BatchRequest1"] = None,
+        **kwargs: Any
+    ) -> "_models.BatchResult1":
+        """batch_get_run_data.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchRequest1
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchResult1, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchResult1
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchResult1"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchRequest1')
+        else:
+            _json = None
+
+        request = build_batch_get_run_data_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_get_run_data.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 207]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Both 200 and 207 (multi-status) responses carry the same BatchResult1 payload.
+        deserialized = self._deserialize('BatchResult1', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_get_run_data.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchrundata"}  # type: ignore
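+
+    # Usage sketch (editor's note): batch_get_run_data accepts 200 and 207
+    # (multi-status) responses and deserializes both into BatchResult1. The call
+    # below uses the same assumed ``client.runs`` attribute and omits the optional
+    # BatchRequest1 body, whose fields are not shown in this file.
+    #
+    #     batch_result = await client.runs.batch_get_run_data(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         body=None,
+    #     )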
+
+
+    @distributed_trace_async
+    async def batch_add_or_modify_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_id: str,
+        body: Optional["_models.BatchAddOrModifyRunRequest"] = None,
+        **kwargs: Any
+    ) -> "_models.BatchRunResult":
+        """batch_add_or_modify_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchAddOrModifyRunRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchRunResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchRunResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchRunResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchAddOrModifyRunRequest')
+        else:
+            _json = None
+
+        request = build_batch_add_or_modify_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_add_or_modify_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchRunResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_add_or_modify_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/batch/runs"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def batch_add_or_modify_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_name: str,
+        body: Optional["_models.BatchAddOrModifyRunRequest"] = None,
+        **kwargs: Any
+    ) -> "_models.BatchRunResult":
+        """batch_add_or_modify_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchAddOrModifyRunRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchRunResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchRunResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchRunResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchAddOrModifyRunRequest')
+        else:
+            _json = None
+
+        request = build_batch_add_or_modify_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_add_or_modify_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchRunResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_add_or_modify_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/batch/runs"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def add_or_modify_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        body: Optional["_models.CreateRun"] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """add_or_modify_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.CreateRun
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'CreateRun')
+        else:
+            _json = None
+
+        request = build_add_or_modify_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.add_or_modify_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    add_or_modify_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}"}  # type: ignore
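+
+    # Usage sketch (editor's note): add_or_modify_by_experiment_name upserts a run
+    # under an experiment name. The CreateRun constructor arguments are not defined
+    # in this file, so they are left as an explicit placeholder; ``client.runs``
+    # remains an assumed attribute name.
+    #
+    #     run = await client.runs.add_or_modify_by_experiment_name(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         experiment_name="<experiment-name>",
+    #         body=_models.CreateRun(...),  # placeholder; field names not shown here
+    #     )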
+
+
+    @distributed_trace_async
+    async def get_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """get_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+
+        request = build_get_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            template_url=self.get_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def add_or_modify_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        body: Optional["_models.CreateRun"] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """add_or_modify_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.CreateRun
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'CreateRun')
+        else:
+            _json = None
+
+        request = build_add_or_modify_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.add_or_modify_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    add_or_modify_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """get_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+
+        request = build_get_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            template_url=self.get_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def add_or_modify_experiment(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        body: Optional["_models.CreateRun"] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """add_or_modify_experiment.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.CreateRun
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'CreateRun')
+        else:
+            _json = None
+
+        request = build_add_or_modify_experiment_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.add_or_modify_experiment.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    add_or_modify_experiment.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def add(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        body: Optional["_models.CreateRun"] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """add.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.CreateRun
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'CreateRun')
+        else:
+            _json = None
+
+        request = build_add_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.add.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    add.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """get.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+
+        request = build_get_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            template_url=self.get.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}"}  # type: ignore
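+
+    # Usage sketch (editor's note): every operation in this class pops the optional
+    # ``cls`` and ``error_map`` keyword arguments, as the body of get() shows, so a
+    # caller can override error mapping or receive the raw pipeline response. The
+    # three-argument callable mirrors the ``cls(pipeline_response, deserialized, {})``
+    # call above; other names are illustrative.
+    #
+    #     run, raw = await client.runs.get(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         cls=lambda pipeline_response, deserialized, headers: (deserialized, pipeline_response),
+    #     )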
+
+
+    @distributed_trace_async
+    async def delete_tags_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        body: Optional[List[str]] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """delete_tags_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: list[str]
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, '[str]')
+        else:
+            _json = None
+
+        request = build_delete_tags_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.delete_tags_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    delete_tags_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/tags"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def modify_or_delete_tags_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        body: Optional["_models.DeleteOrModifyTags"] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """modify_or_delete_tags_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DeleteOrModifyTags
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DeleteOrModifyTags')
+        else:
+            _json = None
+
+        request = build_modify_or_delete_tags_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.modify_or_delete_tags_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    modify_or_delete_tags_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/tags"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def delete_tags_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        body: Optional[List[str]] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """delete_tags_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: list[str]
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, '[str]')
+        else:
+            _json = None
+
+        request = build_delete_tags_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.delete_tags_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    delete_tags_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/tags"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def modify_or_delete_tags_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        body: Optional["_models.DeleteOrModifyTags"] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """modify_or_delete_tags_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DeleteOrModifyTags
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DeleteOrModifyTags')
+        else:
+            _json = None
+
+        request = build_modify_or_delete_tags_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.modify_or_delete_tags_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    modify_or_delete_tags_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/tags"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def delete_tags(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        body: Optional[List[str]] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """delete_tags.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: list[str]
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, '[str]')
+        else:
+            _json = None
+
+        request = build_delete_tags_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.delete_tags.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    delete_tags.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/tags"}  # type: ignore
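+
+    # Usage sketch (editor's note): delete_tags takes a plain list of tag names,
+    # serialized as '[str]' above. ``client.runs`` is the same assumed attribute
+    # used in the earlier sketches; the tag values are illustrative.
+    #
+    #     run = await client.runs.delete_tags(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         body=["stale-tag", "temporary-tag"],
+    #     )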
+
+
+    @distributed_trace_async
+    async def delete_run_services_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        body: Optional["_models.DeleteRunServices"] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """delete_run_services_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DeleteRunServices
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DeleteRunServices')
+        else:
+            _json = None
+
+        request = build_delete_run_services_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.delete_run_services_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    delete_run_services_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/services"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def delete_run_services_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        body: Optional["_models.DeleteRunServices"] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """delete_run_services_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DeleteRunServices
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DeleteRunServices')
+        else:
+            _json = None
+
+        request = build_delete_run_services_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.delete_run_services_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    delete_run_services_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/services"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def delete_run_services(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        body: Optional["_models.DeleteRunServices"] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """delete_run_services.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DeleteRunServices
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DeleteRunServices')
+        else:
+            _json = None
+
+        request = build_delete_run_services_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.delete_run_services.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    delete_run_services.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/services"}  # type: ignore
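+
+    # Editor's note (hedged usage sketch, not generated code): every coroutine in this
+    # operation group follows the same shape -- the optional body model is serialized to
+    # JSON when provided, the default error_map for 401/404/409 can be extended through
+    # the error_map keyword, and a cls callback receives the raw pipeline response.
+    # Assuming ops is this operation group taken from an authenticated service client:
+    #
+    #     from azure.core.exceptions import ClientAuthenticationError
+    #
+    #     run = await ops.delete_run_services(
+    #         subscription_id="<subscription-id>",          # placeholder values
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         body=None,                                     # no JSON payload is sent when body is None
+    #         error_map={403: ClientAuthenticationError},    # map an extra status code to a specific error type
+    #     )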
+
+
+    @distributed_trace_async
+    async def add_or_modify_run_service_instances(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        node_id: int,
+        body: Optional["_models.AddOrModifyRunServiceInstancesRequest"] = None,
+        **kwargs: Any
+    ) -> "_models.RunServiceInstances":
+        """add_or_modify_run_service_instances.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param node_id:
+        :type node_id: int
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.AddOrModifyRunServiceInstancesRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: RunServiceInstances, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.RunServiceInstances
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RunServiceInstances"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'AddOrModifyRunServiceInstancesRequest')
+        else:
+            _json = None
+
+        request = build_add_or_modify_run_service_instances_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            node_id=node_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.add_or_modify_run_service_instances.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('RunServiceInstances', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    add_or_modify_run_service_instances.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/serviceinstances/{nodeId}"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_run_service_instances(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        node_id: int,
+        **kwargs: Any
+    ) -> "_models.RunServiceInstances":
+        """get_run_service_instances.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param node_id:
+        :type node_id: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: RunServiceInstances, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.RunServiceInstances
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RunServiceInstances"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_run_service_instances_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            node_id=node_id,
+            template_url=self.get_run_service_instances.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('RunServiceInstances', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_run_service_instances.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/serviceinstances/{nodeId}"}  # type: ignore
+
+
+    @distributed_trace
+    def get_by_query_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_name: str,
+        body: Optional["_models.QueryParams"] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedRunList"]:
+        """get_by_query_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.QueryParams
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either PaginatedRunList or the result of cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedRunList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedRunList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                if body is not None:
+                    _json = self._serialize.body(body, 'QueryParams')
+                else:
+                    _json = None
+                
+                request = build_get_by_query_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    experiment_name=experiment_name,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=self.get_by_query_by_experiment_name.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                if body is not None:
+                    _json = self._serialize.body(body, 'QueryParams')
+                else:
+                    _json = None
+                
+                request = build_get_by_query_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    experiment_name=experiment_name,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
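+                # Follow-up pages are requested against the service-provided next link and forced to GET.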
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedRunList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    get_by_query_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs:query"}  # type: ignore
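+
+    # Editor's note (hedged usage sketch, not generated code): this method returns an
+    # AsyncItemPaged rather than a coroutine, so it is not awaited directly; iterate it
+    # with async for. Assuming ops is this operation group and models refers to this
+    # package's models module:
+    #
+    #     pager = ops.get_by_query_by_experiment_name(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         experiment_name="<experiment-name>",
+    #         body=models.QueryParams(),     # optional; filter and sort fields live on the QueryParams model
+    #     )
+    #     async for run in pager:            # each item is a deserialized Run model
+    #         print(run.run_id)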
+
+    @distributed_trace
+    def get_by_query_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_id: str,
+        body: Optional["_models.QueryParams"] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedRunList"]:
+        """get_by_query_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.QueryParams
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either PaginatedRunList or the result of cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedRunList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedRunList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                if body is not None:
+                    _json = self._serialize.body(body, 'QueryParams')
+                else:
+                    _json = None
+                
+                request = build_get_by_query_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    experiment_id=experiment_id,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=self.get_by_query_by_experiment_id.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                if body is not None:
+                    _json = self._serialize.body(body, 'QueryParams')
+                else:
+                    _json = None
+                
+                request = build_get_by_query_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    experiment_id=experiment_id,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedRunList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    get_by_query_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs:query"}  # type: ignore
+
+    @distributed_trace_async
+    async def get_by_ids_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_id: str,
+        body: Optional["_models.GetRunsByIds"] = None,
+        **kwargs: Any
+    ) -> "_models.BatchRunResult":
+        """get_by_ids_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.GetRunsByIds
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchRunResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchRunResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchRunResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'GetRunsByIds')
+        else:
+            _json = None
+
+        request = build_get_by_ids_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.get_by_ids_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchRunResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_by_ids_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/runIds"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def get_by_ids_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        experiment_name: str,
+        body: Optional["_models.GetRunsByIds"] = None,
+        **kwargs: Any
+    ) -> "_models.BatchRunResult":
+        """get_by_ids_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.GetRunsByIds
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchRunResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchRunResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchRunResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'GetRunsByIds')
+        else:
+            _json = None
+
+        request = build_get_by_ids_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.get_by_ids_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchRunResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_by_ids_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/runIds"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def cancel_run_with_uri_by_experiment_id(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_id: str,
+        cancelation_reason: Optional[str] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """cancel_run_with_uri_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param cancelation_reason:
+        :type cancelation_reason: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_cancel_run_with_uri_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            cancelation_reason=cancelation_reason,
+            template_url=self.cancel_run_with_uri_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    cancel_run_with_uri_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/cancel"}  # type: ignore
+
+
+    @distributed_trace_async
+    async def cancel_run_with_uri_by_experiment_name(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        experiment_name: str,
+        cancelation_reason: Optional[str] = None,
+        **kwargs: Any
+    ) -> "_models.Run":
+        """cancel_run_with_uri_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param cancelation_reason:
+        :type cancelation_reason: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_cancel_run_with_uri_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            cancelation_reason=cancelation_reason,
+            template_url=self.cancel_run_with_uri_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    cancel_run_with_uri_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/cancel"}  # type: ignore
+
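
A hedged usage sketch for the cancel-by-URI operations that close this file. It assumes the generated AzureMachineLearningWorkspaces client exposes this operation group as client.runs (the attribute name is an assumption) and uses placeholder identifiers throughout.

async def cancel_run_example(client):
    # Cancel a run addressed by experiment name; the optional free-text reason is
    # forwarded to the service as-is.
    run = await client.runs.cancel_run_with_uri_by_experiment_name(
        subscription_id="<subscription-id>",
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        run_id="<run-id>",
        experiment_name="<experiment-name>",
        cancelation_reason="superseded by a newer submission",
    )
    return run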
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_spans_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_spans_operations.py
new file mode 100644
index 00000000..92894166
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/aio/operations/_spans_operations.py
@@ -0,0 +1,302 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models as _models
+from ..._vendor import _convert_request
+from ...operations._spans_operations import build_get_active_request, build_list_request, build_post_request
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class SpansOperations:
+    """SpansOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace_async
+    async def post(  # pylint: disable=inconsistent-return-statements
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        body: Optional["_models.RunStatusSpans"] = None,
+        **kwargs: Any
+    ) -> None:
+        """post.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.RunStatusSpans
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'RunStatusSpans')
+        else:
+            _json = None
+
+        request = build_post_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.post.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    post.metadata = {'url': "/history/v1.0/private/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/spans"}  # type: ignore
+
+
+    @distributed_trace
+    def list(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        continuation_token_parameter: Optional[str] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedSpanDefinition1List"]:
+        """list.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either PaginatedSpanDefinition1List or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedSpanDefinition1List]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedSpanDefinition1List"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.list.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedSpanDefinition1List", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': "/history/v1.0/private/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/spans"}  # type: ignore
+
+    @distributed_trace
+    def get_active(
+        self,
+        subscription_id: str,
+        resource_group_name: str,
+        workspace_name: str,
+        run_id: str,
+        continuation_token_parameter: Optional[str] = None,
+        **kwargs: Any
+    ) -> AsyncIterable["_models.PaginatedSpanDefinition1List"]:
+        """get_active.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either PaginatedSpanDefinition1List or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedSpanDefinition1List]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedSpanDefinition1List"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_get_active_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.get_active.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_get_active_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedSpanDefinition1List", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    get_active.metadata = {'url': "/history/v1.0/private/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/spans/active"}  # type: ignore
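
A hedged sketch of the cls hook used throughout these operations: for a non-paged call such as post above, the callback receives the pipeline response, the deserialized result (None here), and the response headers, which makes it a convenient way to surface the raw HTTP status. The ops variable stands in for this operation group obtained from the service client.

async def post_spans_and_get_status(ops, spans_body):
    # spans_body is an optional models.RunStatusSpans instance; None sends no payload.
    status = await ops.post(
        subscription_id="<subscription-id>",
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        run_id="<run-id>",
        body=spans_body,
        cls=lambda pipeline_response, deserialized, headers: pipeline_response.http_response.status_code,
    )
    return status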
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/models/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/models/__init__.py
new file mode 100644
index 00000000..422eea5c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/models/__init__.py
@@ -0,0 +1,287 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+try:
+    from ._models_py3 import AddOrModifyRunServiceInstancesRequest
+    from ._models_py3 import Artifact
+    from ._models_py3 import ArtifactContentInformation
+    from ._models_py3 import ArtifactDataPath
+    from ._models_py3 import ArtifactPath
+    from ._models_py3 import ArtifactPathList
+    from ._models_py3 import BaseEvent
+    from ._models_py3 import BatchAddOrModifyRunRequest
+    from ._models_py3 import BatchArtifactContentInformationResult
+    from ._models_py3 import BatchEventCommand
+    from ._models_py3 import BatchEventCommandResult
+    from ._models_py3 import BatchIMetricV2
+    from ._models_py3 import BatchRequest1
+    from ._models_py3 import BatchResult1
+    from ._models_py3 import BatchRunResult
+    from ._models_py3 import Compute
+    from ._models_py3 import ComputeRequest
+    from ._models_py3 import CreateRun
+    from ._models_py3 import CreatedFrom
+    from ._models_py3 import DatasetIdentifier
+    from ._models_py3 import DatasetInputDetails
+    from ._models_py3 import DatasetLineage
+    from ._models_py3 import DatasetOutputDetails
+    from ._models_py3 import DeleteConfiguration
+    from ._models_py3 import DeleteExperimentTagsResult
+    from ._models_py3 import DeleteOrModifyTags
+    from ._models_py3 import DeleteRunServices
+    from ._models_py3 import DeleteTagsCommand
+    from ._models_py3 import DerivedMetricKey
+    from ._models_py3 import EndpointSetting
+    from ._models_py3 import ErrorAdditionalInfo
+    from ._models_py3 import ErrorResponse
+    from ._models_py3 import Event
+    from ._models_py3 import Experiment
+    from ._models_py3 import ExperimentQueryParams
+    from ._models_py3 import GetRunDataRequest
+    from ._models_py3 import GetRunDataResult
+    from ._models_py3 import GetRunsByIds
+    from ._models_py3 import GetSampledMetricRequest
+    from ._models_py3 import IMetricV2
+    from ._models_py3 import InnerErrorResponse
+    from ._models_py3 import JobCost
+    from ._models_py3 import KeyValuePairBaseEventErrorResponse
+    from ._models_py3 import KeyValuePairString
+    from ._models_py3 import KeyValuePairStringJToken
+    from ._models_py3 import Link
+    from ._models_py3 import ListGenericResourceMetrics
+    from ._models_py3 import ListMetrics
+    from ._models_py3 import MetricDefinition
+    from ._models_py3 import MetricProperties
+    from ._models_py3 import MetricSample
+    from ._models_py3 import MetricSchema
+    from ._models_py3 import MetricSchemaProperty
+    from ._models_py3 import MetricV2
+    from ._models_py3 import MetricV2Value
+    from ._models_py3 import ModifyExperiment
+    from ._models_py3 import OutputDatasetLineage
+    from ._models_py3 import PaginatedArtifactContentInformationList
+    from ._models_py3 import PaginatedArtifactList
+    from ._models_py3 import PaginatedExperimentList
+    from ._models_py3 import PaginatedMetricDefinitionList
+    from ._models_py3 import PaginatedRunList
+    from ._models_py3 import PaginatedSpanDefinition1List
+    from ._models_py3 import PostRunMetricsError
+    from ._models_py3 import PostRunMetricsResult
+    from ._models_py3 import QueryParams
+    from ._models_py3 import QueueingInfo
+    from ._models_py3 import RetrieveFullFidelityMetricRequest
+    from ._models_py3 import RootError
+    from ._models_py3 import Run
+    from ._models_py3 import RunDetails
+    from ._models_py3 import RunDetailsWarning
+    from ._models_py3 import RunMetric
+    from ._models_py3 import RunOptions
+    from ._models_py3 import RunServiceInstances
+    from ._models_py3 import RunStatusSpans
+    from ._models_py3 import RunTypeV2
+    from ._models_py3 import ServiceInstance
+    from ._models_py3 import ServiceInstanceResult
+    from ._models_py3 import SpanContext
+    from ._models_py3 import SpanDefinition1
+    from ._models_py3 import SqlDataPath
+    from ._models_py3 import StoredProcedureParameter
+    from ._models_py3 import TypedAssetReference
+    from ._models_py3 import User
+except (SyntaxError, ImportError):
+    from ._models import AddOrModifyRunServiceInstancesRequest  # type: ignore
+    from ._models import Artifact  # type: ignore
+    from ._models import ArtifactContentInformation  # type: ignore
+    from ._models import ArtifactDataPath  # type: ignore
+    from ._models import ArtifactPath  # type: ignore
+    from ._models import ArtifactPathList  # type: ignore
+    from ._models import BaseEvent  # type: ignore
+    from ._models import BatchAddOrModifyRunRequest  # type: ignore
+    from ._models import BatchArtifactContentInformationResult  # type: ignore
+    from ._models import BatchEventCommand  # type: ignore
+    from ._models import BatchEventCommandResult  # type: ignore
+    from ._models import BatchIMetricV2  # type: ignore
+    from ._models import BatchRequest1  # type: ignore
+    from ._models import BatchResult1  # type: ignore
+    from ._models import BatchRunResult  # type: ignore
+    from ._models import Compute  # type: ignore
+    from ._models import ComputeRequest  # type: ignore
+    from ._models import CreateRun  # type: ignore
+    from ._models import CreatedFrom  # type: ignore
+    from ._models import DatasetIdentifier  # type: ignore
+    from ._models import DatasetInputDetails  # type: ignore
+    from ._models import DatasetLineage  # type: ignore
+    from ._models import DatasetOutputDetails  # type: ignore
+    from ._models import DeleteConfiguration  # type: ignore
+    from ._models import DeleteExperimentTagsResult  # type: ignore
+    from ._models import DeleteOrModifyTags  # type: ignore
+    from ._models import DeleteRunServices  # type: ignore
+    from ._models import DeleteTagsCommand  # type: ignore
+    from ._models import DerivedMetricKey  # type: ignore
+    from ._models import EndpointSetting  # type: ignore
+    from ._models import ErrorAdditionalInfo  # type: ignore
+    from ._models import ErrorResponse  # type: ignore
+    from ._models import Event  # type: ignore
+    from ._models import Experiment  # type: ignore
+    from ._models import ExperimentQueryParams  # type: ignore
+    from ._models import GetRunDataRequest  # type: ignore
+    from ._models import GetRunDataResult  # type: ignore
+    from ._models import GetRunsByIds  # type: ignore
+    from ._models import GetSampledMetricRequest  # type: ignore
+    from ._models import IMetricV2  # type: ignore
+    from ._models import InnerErrorResponse  # type: ignore
+    from ._models import JobCost  # type: ignore
+    from ._models import KeyValuePairBaseEventErrorResponse  # type: ignore
+    from ._models import KeyValuePairString  # type: ignore
+    from ._models import KeyValuePairStringJToken  # type: ignore
+    from ._models import Link  # type: ignore
+    from ._models import ListGenericResourceMetrics  # type: ignore
+    from ._models import ListMetrics  # type: ignore
+    from ._models import MetricDefinition  # type: ignore
+    from ._models import MetricProperties  # type: ignore
+    from ._models import MetricSample  # type: ignore
+    from ._models import MetricSchema  # type: ignore
+    from ._models import MetricSchemaProperty  # type: ignore
+    from ._models import MetricV2  # type: ignore
+    from ._models import MetricV2Value  # type: ignore
+    from ._models import ModifyExperiment  # type: ignore
+    from ._models import OutputDatasetLineage  # type: ignore
+    from ._models import PaginatedArtifactContentInformationList  # type: ignore
+    from ._models import PaginatedArtifactList  # type: ignore
+    from ._models import PaginatedExperimentList  # type: ignore
+    from ._models import PaginatedMetricDefinitionList  # type: ignore
+    from ._models import PaginatedRunList  # type: ignore
+    from ._models import PaginatedSpanDefinition1List  # type: ignore
+    from ._models import PostRunMetricsError  # type: ignore
+    from ._models import PostRunMetricsResult  # type: ignore
+    from ._models import QueryParams  # type: ignore
+    from ._models import QueueingInfo  # type: ignore
+    from ._models import RetrieveFullFidelityMetricRequest  # type: ignore
+    from ._models import RootError  # type: ignore
+    from ._models import Run  # type: ignore
+    from ._models import RunDetails  # type: ignore
+    from ._models import RunDetailsWarning  # type: ignore
+    from ._models import RunMetric  # type: ignore
+    from ._models import RunOptions  # type: ignore
+    from ._models import RunServiceInstances  # type: ignore
+    from ._models import RunStatusSpans  # type: ignore
+    from ._models import RunTypeV2  # type: ignore
+    from ._models import ServiceInstance  # type: ignore
+    from ._models import ServiceInstanceResult  # type: ignore
+    from ._models import SpanContext  # type: ignore
+    from ._models import SpanDefinition1  # type: ignore
+    from ._models import SqlDataPath  # type: ignore
+    from ._models import StoredProcedureParameter  # type: ignore
+    from ._models import TypedAssetReference  # type: ignore
+    from ._models import User  # type: ignore
+
+from ._azure_machine_learning_workspaces_enums import (
+    DatasetConsumptionType,
+    DatasetDeliveryMechanism,
+    DatasetOutputType,
+    ExperimentViewType,
+    MetricValueType,
+    RunStatus,
+    SortOrderDirection,
+    StoredProcedureParameterType,
+)
+
+__all__ = [
+    'AddOrModifyRunServiceInstancesRequest',
+    'Artifact',
+    'ArtifactContentInformation',
+    'ArtifactDataPath',
+    'ArtifactPath',
+    'ArtifactPathList',
+    'BaseEvent',
+    'BatchAddOrModifyRunRequest',
+    'BatchArtifactContentInformationResult',
+    'BatchEventCommand',
+    'BatchEventCommandResult',
+    'BatchIMetricV2',
+    'BatchRequest1',
+    'BatchResult1',
+    'BatchRunResult',
+    'Compute',
+    'ComputeRequest',
+    'CreateRun',
+    'CreatedFrom',
+    'DatasetIdentifier',
+    'DatasetInputDetails',
+    'DatasetLineage',
+    'DatasetOutputDetails',
+    'DeleteConfiguration',
+    'DeleteExperimentTagsResult',
+    'DeleteOrModifyTags',
+    'DeleteRunServices',
+    'DeleteTagsCommand',
+    'DerivedMetricKey',
+    'EndpointSetting',
+    'ErrorAdditionalInfo',
+    'ErrorResponse',
+    'Event',
+    'Experiment',
+    'ExperimentQueryParams',
+    'GetRunDataRequest',
+    'GetRunDataResult',
+    'GetRunsByIds',
+    'GetSampledMetricRequest',
+    'IMetricV2',
+    'InnerErrorResponse',
+    'JobCost',
+    'KeyValuePairBaseEventErrorResponse',
+    'KeyValuePairString',
+    'KeyValuePairStringJToken',
+    'Link',
+    'ListGenericResourceMetrics',
+    'ListMetrics',
+    'MetricDefinition',
+    'MetricProperties',
+    'MetricSample',
+    'MetricSchema',
+    'MetricSchemaProperty',
+    'MetricV2',
+    'MetricV2Value',
+    'ModifyExperiment',
+    'OutputDatasetLineage',
+    'PaginatedArtifactContentInformationList',
+    'PaginatedArtifactList',
+    'PaginatedExperimentList',
+    'PaginatedMetricDefinitionList',
+    'PaginatedRunList',
+    'PaginatedSpanDefinition1List',
+    'PostRunMetricsError',
+    'PostRunMetricsResult',
+    'QueryParams',
+    'QueueingInfo',
+    'RetrieveFullFidelityMetricRequest',
+    'RootError',
+    'Run',
+    'RunDetails',
+    'RunDetailsWarning',
+    'RunMetric',
+    'RunOptions',
+    'RunServiceInstances',
+    'RunStatusSpans',
+    'RunTypeV2',
+    'ServiceInstance',
+    'ServiceInstanceResult',
+    'SpanContext',
+    'SpanDefinition1',
+    'SqlDataPath',
+    'StoredProcedureParameter',
+    'TypedAssetReference',
+    'User',
+    'DatasetConsumptionType',
+    'DatasetDeliveryMechanism',
+    'DatasetOutputType',
+    'ExperimentViewType',
+    'MetricValueType',
+    'RunStatus',
+    'SortOrderDirection',
+    'StoredProcedureParameterType',
+]
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/models/_azure_machine_learning_workspaces_enums.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/models/_azure_machine_learning_workspaces_enums.py
new file mode 100644
index 00000000..cb89b2e3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/models/_azure_machine_learning_workspaces_enums.py
@@ -0,0 +1,80 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from enum import Enum
+from azure.core import CaseInsensitiveEnumMeta
+
+
+class DatasetConsumptionType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+
+    RUN_INPUT = "RunInput"
+    REFERENCE = "Reference"
+
+class DatasetDeliveryMechanism(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+
+    DIRECT = "Direct"
+    MOUNT = "Mount"
+    DOWNLOAD = "Download"
+    HDFS = "Hdfs"
+
+class DatasetOutputType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+
+    RUN_OUTPUT = "RunOutput"
+    REFERENCE = "Reference"
+
+class ExperimentViewType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """ViewType filters experiments by their archived state. Default is ActiveOnly
+    """
+
+    DEFAULT = "Default"
+    ALL = "All"
+    ACTIVE_ONLY = "ActiveOnly"
+    ARCHIVED_ONLY = "ArchivedOnly"
+
+class MetricValueType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+
+    INT = "Int"
+    DOUBLE = "Double"
+    STRING = "String"
+    BOOL = "Bool"
+    ARTIFACT = "Artifact"
+
+class RunStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Gets span status.
+    OpenTelemetry sets it to
+    https://github.com/open-telemetry/opentelemetry-dotnet/blob/master/src/OpenTelemetry.Api/Trace/Status.cs
+    That status enums are not very meaningful to us, so we customize this.
+    """
+
+    NOT_STARTED = "NotStarted"
+    UNAPPROVED = "Unapproved"
+    PAUSING = "Pausing"
+    PAUSED = "Paused"
+    STARTING = "Starting"
+    PREPARING = "Preparing"
+    QUEUED = "Queued"
+    RUNNING = "Running"
+    FINALIZING = "Finalizing"
+    CANCEL_REQUESTED = "CancelRequested"
+    COMPLETED = "Completed"
+    FAILED = "Failed"
+    CANCELED = "Canceled"
+
+class SortOrderDirection(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+
+    ASC = "Asc"
+    DESC = "Desc"
+
+class StoredProcedureParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+
+    STRING = "String"
+    INT = "Int"
+    DECIMAL = "Decimal"
+    GUID = "Guid"
+    BOOLEAN = "Boolean"
+    DATE = "Date"
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/models/_models.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/models/_models.py
new file mode 100644
index 00000000..398700ee
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/models/_models.py
@@ -0,0 +1,4329 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
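+
+# NOTE (editorial, not generated): every model below follows the same msrest
+# pattern -- fields are accepted as keyword arguments, mapped to wire names via
+# ``_attribute_map``, and checked against ``_validation``. Instances are
+# expected to round-trip with the ``serialize()``/``as_dict()``/``deserialize()``
+# helpers inherited from ``msrest.serialization.Model``; treat that as the
+# general msrest contract rather than anything specific to this module.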
+
+
+class AddOrModifyRunServiceInstancesRequest(msrest.serialization.Model):
+    """AddOrModifyRunServiceInstancesRequest.
+
+    :ivar instances: Dictionary of :code:`<ServiceInstance>`.
+    :vartype instances: dict[str, ~azure.mgmt.machinelearningservices.models.ServiceInstance]
+    """
+
+    _attribute_map = {
+        'instances': {'key': 'instances', 'type': '{ServiceInstance}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword instances: Dictionary of :code:`<ServiceInstance>`.
+        :paramtype instances: dict[str, ~azure.mgmt.machinelearningservices.models.ServiceInstance]
+        """
+        super(AddOrModifyRunServiceInstancesRequest, self).__init__(**kwargs)
+        self.instances = kwargs.get('instances', None)
+
+
+class Artifact(msrest.serialization.Model):
+    """Details of an Artifact.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar artifact_id: The identifier of an Artifact. Format of ArtifactId -
+     {Origin}/{Container}/{Path}.
+    :vartype artifact_id: str
+    :ivar origin: Required. The origin of the Artifact creation request. Available origins are
+     'ExperimentRun', 'LocalUpload', 'WebUpload', 'Dataset' and 'Unknown'.
+    :vartype origin: str
+    :ivar container: Required. The name of the container. Artifacts can be grouped by container.
+    :vartype container: str
+    :ivar path: Required. The path to the Artifact in a container.
+    :vartype path: str
+    :ivar etag: The Etag of the Artifact.
+    :vartype etag: str
+    :ivar created_time: The Date and Time at which the Artifact is created. The DateTime is in UTC.
+    :vartype created_time: ~datetime.datetime
+    :ivar data_path:
+    :vartype data_path: ~azure.mgmt.machinelearningservices.models.ArtifactDataPath
+    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
+    :vartype tags: dict[str, str]
+    """
+
+    _validation = {
+        'origin': {'required': True},
+        'container': {'required': True},
+        'path': {'required': True},
+    }
+
+    _attribute_map = {
+        'artifact_id': {'key': 'artifactId', 'type': 'str'},
+        'origin': {'key': 'origin', 'type': 'str'},
+        'container': {'key': 'container', 'type': 'str'},
+        'path': {'key': 'path', 'type': 'str'},
+        'etag': {'key': 'etag', 'type': 'str'},
+        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
+        'data_path': {'key': 'dataPath', 'type': 'ArtifactDataPath'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword artifact_id: The identifier of an Artifact. Format of ArtifactId -
+         {Origin}/{Container}/{Path}.
+        :paramtype artifact_id: str
+        :keyword origin: Required. The origin of the Artifact creation request. Available origins are
+         'ExperimentRun', 'LocalUpload', 'WebUpload', 'Dataset' and 'Unknown'.
+        :paramtype origin: str
+        :keyword container: Required. The name of the container. Artifacts can be grouped by container.
+        :paramtype container: str
+        :keyword path: Required. The path to the Artifact in a container.
+        :paramtype path: str
+        :keyword etag: The Etag of the Artifact.
+        :paramtype etag: str
+        :keyword created_time: The Date and Time at which the Artifact is created. The DateTime is in
+         UTC.
+        :paramtype created_time: ~datetime.datetime
+        :keyword data_path:
+        :paramtype data_path: ~azure.mgmt.machinelearningservices.models.ArtifactDataPath
+        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
+        :paramtype tags: dict[str, str]
+        """
+        super(Artifact, self).__init__(**kwargs)
+        self.artifact_id = kwargs.get('artifact_id', None)
+        self.origin = kwargs['origin']
+        self.container = kwargs['container']
+        self.path = kwargs['path']
+        self.etag = kwargs.get('etag', None)
+        self.created_time = kwargs.get('created_time', None)
+        self.data_path = kwargs.get('data_path', None)
+        self.tags = kwargs.get('tags', None)
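+
+
+# --- Editorial usage sketch (not part of the generated code) -----------------
+# Illustrates the required-kwarg behaviour visible above: ``origin``,
+# ``container`` and ``path`` are read with ``kwargs['...']``, so omitting any
+# of them raises ``KeyError``. The literal values are placeholders only.
+def _example_artifact():  # illustrative helper, never called by the client
+    return Artifact(
+        origin="ExperimentRun",          # one of the documented origins
+        container="example-container",   # placeholder container name
+        path="outputs/model.pkl",        # placeholder path inside the container
+        tags={"stage": "draft"},         # optional tags
+    )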
+
+
+class ArtifactContentInformation(msrest.serialization.Model):
+    """Details of an Artifact Content Information.
+
+    :ivar content_uri: The URI of the content.
+    :vartype content_uri: str
+    :ivar origin: The origin of the Artifact creation request. Available origins are
+     'ExperimentRun', 'LocalUpload', 'WebUpload', 'Dataset', 'ComputeRecord', 'Metric', and
+     'Unknown'.
+    :vartype origin: str
+    :ivar container: The name of the container. Artifacts can be grouped by container.
+    :vartype container: str
+    :ivar path: The path to the Artifact in a container.
+    :vartype path: str
+    :ivar tags: A set of tags. The tags on the artifact.
+    :vartype tags: dict[str, str]
+    """
+
+    _attribute_map = {
+        'content_uri': {'key': 'contentUri', 'type': 'str'},
+        'origin': {'key': 'origin', 'type': 'str'},
+        'container': {'key': 'container', 'type': 'str'},
+        'path': {'key': 'path', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword content_uri: The URI of the content.
+        :paramtype content_uri: str
+        :keyword origin: The origin of the Artifact creation request. Available origins are
+         'ExperimentRun', 'LocalUpload', 'WebUpload', 'Dataset', 'ComputeRecord', 'Metric', and
+         'Unknown'.
+        :paramtype origin: str
+        :keyword container: The name of the container. Artifacts can be grouped by container.
+        :paramtype container: str
+        :keyword path: The path to the Artifact in a container.
+        :paramtype path: str
+        :keyword tags: A set of tags. The tags on the artifact.
+        :paramtype tags: dict[str, str]
+        """
+        super(ArtifactContentInformation, self).__init__(**kwargs)
+        self.content_uri = kwargs.get('content_uri', None)
+        self.origin = kwargs.get('origin', None)
+        self.container = kwargs.get('container', None)
+        self.path = kwargs.get('path', None)
+        self.tags = kwargs.get('tags', None)
+
+
+class ArtifactDataPath(msrest.serialization.Model):
+    """ArtifactDataPath.
+
+    :ivar data_store_name:
+    :vartype data_store_name: str
+    :ivar relative_path:
+    :vartype relative_path: str
+    :ivar sql_data_path:
+    :vartype sql_data_path: ~azure.mgmt.machinelearningservices.models.SqlDataPath
+    """
+
+    _attribute_map = {
+        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
+        'relative_path': {'key': 'relativePath', 'type': 'str'},
+        'sql_data_path': {'key': 'sqlDataPath', 'type': 'SqlDataPath'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword data_store_name:
+        :paramtype data_store_name: str
+        :keyword relative_path:
+        :paramtype relative_path: str
+        :keyword sql_data_path:
+        :paramtype sql_data_path: ~azure.mgmt.machinelearningservices.models.SqlDataPath
+        """
+        super(ArtifactDataPath, self).__init__(**kwargs)
+        self.data_store_name = kwargs.get('data_store_name', None)
+        self.relative_path = kwargs.get('relative_path', None)
+        self.sql_data_path = kwargs.get('sql_data_path', None)
+
+
+class ArtifactPath(msrest.serialization.Model):
+    """Details of an Artifact Path.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar path: Required. The path to the Artifact in a container.
+    :vartype path: str
+    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
+    :vartype tags: dict[str, str]
+    """
+
+    _validation = {
+        'path': {'required': True},
+    }
+
+    _attribute_map = {
+        'path': {'key': 'path', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword path: Required. The path to the Artifact in a container.
+        :paramtype path: str
+        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
+        :paramtype tags: dict[str, str]
+        """
+        super(ArtifactPath, self).__init__(**kwargs)
+        self.path = kwargs['path']
+        self.tags = kwargs.get('tags', None)
+
+
+class ArtifactPathList(msrest.serialization.Model):
+    """Contains list of Artifact Paths.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar paths: Required. List of Artifact Paths.
+    :vartype paths: list[~azure.mgmt.machinelearningservices.models.ArtifactPath]
+    """
+
+    _validation = {
+        'paths': {'required': True},
+    }
+
+    _attribute_map = {
+        'paths': {'key': 'paths', 'type': '[ArtifactPath]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword paths: Required. List of Artifact Paths.
+        :paramtype paths: list[~azure.mgmt.machinelearningservices.models.ArtifactPath]
+        """
+        super(ArtifactPathList, self).__init__(**kwargs)
+        self.paths = kwargs['paths']
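+
+
+# --- Editorial usage sketch (not part of the generated code) -----------------
+# ``ArtifactPath.path`` and ``ArtifactPathList.paths`` are both marked required
+# in ``_validation``; a minimal, hedged construction with placeholder values:
+def _example_artifact_path_list():  # illustrative helper, never called by the client
+    entry = ArtifactPath(path="logs/std_log.txt", tags={"kind": "log"})
+    return ArtifactPathList(paths=[entry])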
+
+
+class BaseEvent(msrest.serialization.Model):
+    """Base event is the envelope used to post event data to the Event controller.
+
+    :ivar timestamp:
+    :vartype timestamp: ~datetime.datetime
+    :ivar name:
+    :vartype name: str
+    :ivar data: Anything.
+    :vartype data: any
+    """
+
+    _attribute_map = {
+        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
+        'name': {'key': 'name', 'type': 'str'},
+        'data': {'key': 'data', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword timestamp:
+        :paramtype timestamp: ~datetime.datetime
+        :keyword name:
+        :paramtype name: str
+        :keyword data: Anything.
+        :paramtype data: any
+        """
+        super(BaseEvent, self).__init__(**kwargs)
+        self.timestamp = kwargs.get('timestamp', None)
+        self.name = kwargs.get('name', None)
+        self.data = kwargs.get('data', None)
+
+
+class BatchAddOrModifyRunRequest(msrest.serialization.Model):
+    """BatchAddOrModifyRunRequest.
+
+    :ivar runs:
+    :vartype runs: list[~azure.mgmt.machinelearningservices.models.CreateRun]
+    """
+
+    _attribute_map = {
+        'runs': {'key': 'runs', 'type': '[CreateRun]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword runs:
+        :paramtype runs: list[~azure.mgmt.machinelearningservices.models.CreateRun]
+        """
+        super(BatchAddOrModifyRunRequest, self).__init__(**kwargs)
+        self.runs = kwargs.get('runs', None)
+
+
+class BatchArtifactContentInformationResult(msrest.serialization.Model):
+    """Results of the Batch Artifact Content Information request.
+
+    :ivar artifacts: Artifact details of the Artifact Ids requested.
+    :vartype artifacts: dict[str, ~azure.mgmt.machinelearningservices.models.Artifact]
+    :ivar artifact_content_information: Artifact Content Information details of the Artifact Ids
+     requested.
+    :vartype artifact_content_information: dict[str,
+     ~azure.mgmt.machinelearningservices.models.ArtifactContentInformation]
+    :ivar errors: Errors that occurred while fetching the requested Artifact Ids.
+    :vartype errors: dict[str, ~azure.mgmt.machinelearningservices.models.ErrorResponse]
+    """
+
+    _attribute_map = {
+        'artifacts': {'key': 'artifacts', 'type': '{Artifact}'},
+        'artifact_content_information': {'key': 'artifactContentInformation', 'type': '{ArtifactContentInformation}'},
+        'errors': {'key': 'errors', 'type': '{ErrorResponse}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword artifacts: Artifact details of the Artifact Ids requested.
+        :paramtype artifacts: dict[str, ~azure.mgmt.machinelearningservices.models.Artifact]
+        :keyword artifact_content_information: Artifact Content Information details of the Artifact Ids
+         requested.
+        :paramtype artifact_content_information: dict[str,
+         ~azure.mgmt.machinelearningservices.models.ArtifactContentInformation]
+        :keyword errors: Errors that occurred while fetching the requested Artifact Ids.
+        :paramtype errors: dict[str, ~azure.mgmt.machinelearningservices.models.ErrorResponse]
+        """
+        super(BatchArtifactContentInformationResult, self).__init__(**kwargs)
+        self.artifacts = kwargs.get('artifacts', None)
+        self.artifact_content_information = kwargs.get('artifact_content_information', None)
+        self.errors = kwargs.get('errors', None)
+
+
+class BatchEventCommand(msrest.serialization.Model):
+    """BatchEventCommand.
+
+    :ivar events:
+    :vartype events: list[~azure.mgmt.machinelearningservices.models.BaseEvent]
+    """
+
+    _attribute_map = {
+        'events': {'key': 'events', 'type': '[BaseEvent]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword events:
+        :paramtype events: list[~azure.mgmt.machinelearningservices.models.BaseEvent]
+        """
+        super(BatchEventCommand, self).__init__(**kwargs)
+        self.events = kwargs.get('events', None)
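+
+
+# --- Editorial usage sketch (not part of the generated code) -----------------
+# BaseEvent is documented above as the envelope posted to the Event controller;
+# BatchEventCommand simply carries a list of such envelopes. The event name and
+# payload below are hypothetical placeholders, not known service event types.
+def _example_event_batch():  # illustrative helper, never called by the client
+    from datetime import datetime, timezone
+
+    event = BaseEvent(
+        timestamp=datetime.now(timezone.utc),
+        name="ExampleEvent",              # placeholder name
+        data={"detail": "illustration"},  # arbitrary payload ("Anything")
+    )
+    return BatchEventCommand(events=[event])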
+
+
+class BatchEventCommandResult(msrest.serialization.Model):
+    """BatchEventCommandResult.
+
+    :ivar errors:
+    :vartype errors:
+     list[~azure.mgmt.machinelearningservices.models.KeyValuePairBaseEventErrorResponse]
+    :ivar successes:
+    :vartype successes: list[str]
+    """
+
+    _attribute_map = {
+        'errors': {'key': 'errors', 'type': '[KeyValuePairBaseEventErrorResponse]'},
+        'successes': {'key': 'successes', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword errors:
+        :paramtype errors:
+         list[~azure.mgmt.machinelearningservices.models.KeyValuePairBaseEventErrorResponse]
+        :keyword successes:
+        :paramtype successes: list[str]
+        """
+        super(BatchEventCommandResult, self).__init__(**kwargs)
+        self.errors = kwargs.get('errors', None)
+        self.successes = kwargs.get('successes', None)
+
+
+class BatchIMetricV2(msrest.serialization.Model):
+    """BatchIMetricV2.
+
+    :ivar values:
+    :vartype values: list[~azure.mgmt.machinelearningservices.models.IMetricV2]
+    :ivar report_errors:
+    :vartype report_errors: bool
+    """
+
+    _attribute_map = {
+        'values': {'key': 'values', 'type': '[IMetricV2]'},
+        'report_errors': {'key': 'reportErrors', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword values:
+        :paramtype values: list[~azure.mgmt.machinelearningservices.models.IMetricV2]
+        :keyword report_errors:
+        :paramtype report_errors: bool
+        """
+        super(BatchIMetricV2, self).__init__(**kwargs)
+        self.values = kwargs.get('values', None)
+        self.report_errors = kwargs.get('report_errors', None)
+
+
+class BatchRequest1(msrest.serialization.Model):
+    """BatchRequest1.
+
+    :ivar requests: Dictionary of :code:`<GetRunDataRequest>`.
+    :vartype requests: dict[str, ~azure.mgmt.machinelearningservices.models.GetRunDataRequest]
+    """
+
+    _attribute_map = {
+        'requests': {'key': 'requests', 'type': '{GetRunDataRequest}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword requests: Dictionary of :code:`<GetRunDataRequest>`.
+        :paramtype requests: dict[str, ~azure.mgmt.machinelearningservices.models.GetRunDataRequest]
+        """
+        super(BatchRequest1, self).__init__(**kwargs)
+        self.requests = kwargs.get('requests', None)
+
+
+class BatchResult1(msrest.serialization.Model):
+    """BatchResult1.
+
+    :ivar successful_results: Dictionary of :code:`<GetRunDataResult>`.
+    :vartype successful_results: dict[str,
+     ~azure.mgmt.machinelearningservices.models.GetRunDataResult]
+    :ivar failed_results: Dictionary of :code:`<ErrorResponse>`.
+    :vartype failed_results: dict[str, ~azure.mgmt.machinelearningservices.models.ErrorResponse]
+    """
+
+    _attribute_map = {
+        'successful_results': {'key': 'successfulResults', 'type': '{GetRunDataResult}'},
+        'failed_results': {'key': 'failedResults', 'type': '{ErrorResponse}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword successful_results: Dictionary of :code:`<GetRunDataResult>`.
+        :paramtype successful_results: dict[str,
+         ~azure.mgmt.machinelearningservices.models.GetRunDataResult]
+        :keyword failed_results: Dictionary of :code:`<ErrorResponse>`.
+        :paramtype failed_results: dict[str, ~azure.mgmt.machinelearningservices.models.ErrorResponse]
+        """
+        super(BatchResult1, self).__init__(**kwargs)
+        self.successful_results = kwargs.get('successful_results', None)
+        self.failed_results = kwargs.get('failed_results', None)
+
+
+class BatchRunResult(msrest.serialization.Model):
+    """BatchRunResult.
+
+    :ivar runs: Dictionary of :code:`<Run>`.
+    :vartype runs: dict[str, ~azure.mgmt.machinelearningservices.models.Run]
+    :ivar errors: Dictionary of :code:`<ErrorResponse>`.
+    :vartype errors: dict[str, ~azure.mgmt.machinelearningservices.models.ErrorResponse]
+    """
+
+    _attribute_map = {
+        'runs': {'key': 'runs', 'type': '{Run}'},
+        'errors': {'key': 'errors', 'type': '{ErrorResponse}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword runs: Dictionary of :code:`<Run>`.
+        :paramtype runs: dict[str, ~azure.mgmt.machinelearningservices.models.Run]
+        :keyword errors: Dictionary of :code:`<ErrorResponse>`.
+        :paramtype errors: dict[str, ~azure.mgmt.machinelearningservices.models.ErrorResponse]
+        """
+        super(BatchRunResult, self).__init__(**kwargs)
+        self.runs = kwargs.get('runs', None)
+        self.errors = kwargs.get('errors', None)
+
+
+class Compute(msrest.serialization.Model):
+    """Compute.
+
+    :ivar target:
+    :vartype target: str
+    :ivar target_type:
+    :vartype target_type: str
+    :ivar vm_size:
+    :vartype vm_size: str
+    :ivar instance_count:
+    :vartype instance_count: int
+    :ivar gpu_count:
+    :vartype gpu_count: int
+    :ivar priority:
+    :vartype priority: str
+    :ivar region:
+    :vartype region: str
+    """
+
+    _attribute_map = {
+        'target': {'key': 'target', 'type': 'str'},
+        'target_type': {'key': 'targetType', 'type': 'str'},
+        'vm_size': {'key': 'vmSize', 'type': 'str'},
+        'instance_count': {'key': 'instanceCount', 'type': 'int'},
+        'gpu_count': {'key': 'gpuCount', 'type': 'int'},
+        'priority': {'key': 'priority', 'type': 'str'},
+        'region': {'key': 'region', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword target:
+        :paramtype target: str
+        :keyword target_type:
+        :paramtype target_type: str
+        :keyword vm_size:
+        :paramtype vm_size: str
+        :keyword instance_count:
+        :paramtype instance_count: int
+        :keyword gpu_count:
+        :paramtype gpu_count: int
+        :keyword priority:
+        :paramtype priority: str
+        :keyword region:
+        :paramtype region: str
+        """
+        super(Compute, self).__init__(**kwargs)
+        self.target = kwargs.get('target', None)
+        self.target_type = kwargs.get('target_type', None)
+        self.vm_size = kwargs.get('vm_size', None)
+        self.instance_count = kwargs.get('instance_count', None)
+        self.gpu_count = kwargs.get('gpu_count', None)
+        self.priority = kwargs.get('priority', None)
+        self.region = kwargs.get('region', None)
+
+
+class ComputeRequest(msrest.serialization.Model):
+    """ComputeRequest.
+
+    :ivar node_count:
+    :vartype node_count: int
+    :ivar gpu_count:
+    :vartype gpu_count: int
+    """
+
+    _attribute_map = {
+        'node_count': {'key': 'nodeCount', 'type': 'int'},
+        'gpu_count': {'key': 'gpuCount', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword node_count:
+        :paramtype node_count: int
+        :keyword gpu_count:
+        :paramtype gpu_count: int
+        """
+        super(ComputeRequest, self).__init__(**kwargs)
+        self.node_count = kwargs.get('node_count', None)
+        self.gpu_count = kwargs.get('gpu_count', None)
+
+
+class CreatedFrom(msrest.serialization.Model):
+    """CreatedFrom.
+
+    :ivar type:  The only acceptable values to pass in are None and "Notebook". The default value
+     is None.
+    :vartype type: str
+    :ivar location_type:  The only acceptable values to pass in are None and "ArtifactId". The
+     default value is None.
+    :vartype location_type: str
+    :ivar location:
+    :vartype location: str
+    """
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'location_type': {'key': 'locationType', 'type': 'str'},
+        'location': {'key': 'location', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword type:  The only acceptable values to pass in are None and "Notebook". The default
+         value is None.
+        :paramtype type: str
+        :keyword location_type:  The only acceptable values to pass in are None and "ArtifactId". The
+         default value is None.
+        :paramtype location_type: str
+        :keyword location:
+        :paramtype location: str
+        """
+        super(CreatedFrom, self).__init__(**kwargs)
+        self.type = kwargs.get('type', None)
+        self.location_type = kwargs.get('location_type', None)
+        self.location = kwargs.get('location', None)
+
+
+class CreateRun(msrest.serialization.Model):
+    """CreateRun.
+
+    :ivar run_id: The identifier for the run. Run IDs must be less than 256 characters and contain
+     only alphanumeric characters with dashes and underscores.
+    :vartype run_id: str
+    :ivar parent_run_id: The parent of the run if the run is hierarchical; otherwise, Null.
+    :vartype parent_run_id: str
+    :ivar experiment_id: The Id of the experiment that created this run.
+    :vartype experiment_id: str
+    :ivar status: The status of the run. The Status string value maps to the RunStatus Enum.
+    :vartype status: str
+    :ivar start_time_utc: The start time of the run in UTC.
+    :vartype start_time_utc: ~datetime.datetime
+    :ivar end_time_utc: The end time of the run in UTC.
+    :vartype end_time_utc: ~datetime.datetime
+    :ivar options:
+    :vartype options: ~azure.mgmt.machinelearningservices.models.RunOptions
+    :ivar is_virtual: A virtual run can set an active child run that will override the virtual run
+     status and properties.
+    :vartype is_virtual: bool
+    :ivar display_name:
+    :vartype display_name: str
+    :ivar name:
+    :vartype name: str
+    :ivar data_container_id:
+    :vartype data_container_id: str
+    :ivar description:
+    :vartype description: str
+    :ivar hidden:
+    :vartype hidden: bool
+    :ivar run_type:
+    :vartype run_type: str
+    :ivar run_type_v2:
+    :vartype run_type_v2: ~azure.mgmt.machinelearningservices.models.RunTypeV2
+    :ivar properties: Dictionary of :code:`<string>`.
+    :vartype properties: dict[str, str]
+    :ivar parameters: Dictionary of :code:`<any>`.
+    :vartype parameters: dict[str, any]
+    :ivar action_uris: Dictionary of :code:`<string>`.
+    :vartype action_uris: dict[str, str]
+    :ivar script_name:
+    :vartype script_name: str
+    :ivar target:
+    :vartype target: str
+    :ivar unique_child_run_compute_targets:
+    :vartype unique_child_run_compute_targets: list[str]
+    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
+    :vartype tags: dict[str, str]
+    :ivar settings: Dictionary of :code:`<string>`.
+    :vartype settings: dict[str, str]
+    :ivar services: Dictionary of :code:`<EndpointSetting>`.
+    :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.EndpointSetting]
+    :ivar input_datasets:
+    :vartype input_datasets: list[~azure.mgmt.machinelearningservices.models.DatasetLineage]
+    :ivar output_datasets:
+    :vartype output_datasets: list[~azure.mgmt.machinelearningservices.models.OutputDatasetLineage]
+    :ivar run_definition: Anything.
+    :vartype run_definition: any
+    :ivar job_specification: Anything.
+    :vartype job_specification: any
+    :ivar primary_metric_name:
+    :vartype primary_metric_name: str
+    :ivar created_from:
+    :vartype created_from: ~azure.mgmt.machinelearningservices.models.CreatedFrom
+    :ivar cancel_uri:
+    :vartype cancel_uri: str
+    :ivar complete_uri:
+    :vartype complete_uri: str
+    :ivar diagnostics_uri:
+    :vartype diagnostics_uri: str
+    :ivar compute_request:
+    :vartype compute_request: ~azure.mgmt.machinelearningservices.models.ComputeRequest
+    :ivar compute:
+    :vartype compute: ~azure.mgmt.machinelearningservices.models.Compute
+    :ivar retain_for_lifetime_of_workspace:
+    :vartype retain_for_lifetime_of_workspace: bool
+    :ivar queueing_info:
+    :vartype queueing_info: ~azure.mgmt.machinelearningservices.models.QueueingInfo
+    :ivar active_child_run_id: The RunId of the active child on a virtual run.
+    :vartype active_child_run_id: str
+    :ivar inputs: Dictionary of :code:`<TypedAssetReference>`.
+    :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+    :ivar outputs: Dictionary of :code:`<TypedAssetReference>`.
+    :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+    """
+
+    _validation = {
+        'unique_child_run_compute_targets': {'unique': True},
+        'input_datasets': {'unique': True},
+        'output_datasets': {'unique': True},
+    }
+
+    _attribute_map = {
+        'run_id': {'key': 'runId', 'type': 'str'},
+        'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
+        'experiment_id': {'key': 'experimentId', 'type': 'str'},
+        'status': {'key': 'status', 'type': 'str'},
+        'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
+        'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
+        'options': {'key': 'options', 'type': 'RunOptions'},
+        'is_virtual': {'key': 'isVirtual', 'type': 'bool'},
+        'display_name': {'key': 'displayName', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'hidden': {'key': 'hidden', 'type': 'bool'},
+        'run_type': {'key': 'runType', 'type': 'str'},
+        'run_type_v2': {'key': 'runTypeV2', 'type': 'RunTypeV2'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'parameters': {'key': 'parameters', 'type': '{object}'},
+        'action_uris': {'key': 'actionUris', 'type': '{str}'},
+        'script_name': {'key': 'scriptName', 'type': 'str'},
+        'target': {'key': 'target', 'type': 'str'},
+        'unique_child_run_compute_targets': {'key': 'uniqueChildRunComputeTargets', 'type': '[str]'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'settings': {'key': 'settings', 'type': '{str}'},
+        'services': {'key': 'services', 'type': '{EndpointSetting}'},
+        'input_datasets': {'key': 'inputDatasets', 'type': '[DatasetLineage]'},
+        'output_datasets': {'key': 'outputDatasets', 'type': '[OutputDatasetLineage]'},
+        'run_definition': {'key': 'runDefinition', 'type': 'object'},
+        'job_specification': {'key': 'jobSpecification', 'type': 'object'},
+        'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
+        'created_from': {'key': 'createdFrom', 'type': 'CreatedFrom'},
+        'cancel_uri': {'key': 'cancelUri', 'type': 'str'},
+        'complete_uri': {'key': 'completeUri', 'type': 'str'},
+        'diagnostics_uri': {'key': 'diagnosticsUri', 'type': 'str'},
+        'compute_request': {'key': 'computeRequest', 'type': 'ComputeRequest'},
+        'compute': {'key': 'compute', 'type': 'Compute'},
+        'retain_for_lifetime_of_workspace': {'key': 'retainForLifetimeOfWorkspace', 'type': 'bool'},
+        'queueing_info': {'key': 'queueingInfo', 'type': 'QueueingInfo'},
+        'active_child_run_id': {'key': 'activeChildRunId', 'type': 'str'},
+        'inputs': {'key': 'inputs', 'type': '{TypedAssetReference}'},
+        'outputs': {'key': 'outputs', 'type': '{TypedAssetReference}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword run_id: The identifier for the run. Run IDs must be less than 256 characters and
+         contain only alphanumeric characters with dashes and underscores.
+        :paramtype run_id: str
+        :keyword parent_run_id: The parent of the run if the run is hierarchical; otherwise, Null.
+        :paramtype parent_run_id: str
+        :keyword experiment_id: The Id of the experiment that created this run.
+        :paramtype experiment_id: str
+        :keyword status: The status of the run. The Status string value maps to the RunStatus Enum.
+        :paramtype status: str
+        :keyword start_time_utc: The start time of the run in UTC.
+        :paramtype start_time_utc: ~datetime.datetime
+        :keyword end_time_utc: The end time of the run in UTC.
+        :paramtype end_time_utc: ~datetime.datetime
+        :keyword options:
+        :paramtype options: ~azure.mgmt.machinelearningservices.models.RunOptions
+        :keyword is_virtual: A virtual run can set an active child run that will override the virtual
+         run status and properties.
+        :paramtype is_virtual: bool
+        :keyword display_name:
+        :paramtype display_name: str
+        :keyword name:
+        :paramtype name: str
+        :keyword data_container_id:
+        :paramtype data_container_id: str
+        :keyword description:
+        :paramtype description: str
+        :keyword hidden:
+        :paramtype hidden: bool
+        :keyword run_type:
+        :paramtype run_type: str
+        :keyword run_type_v2:
+        :paramtype run_type_v2: ~azure.mgmt.machinelearningservices.models.RunTypeV2
+        :keyword properties: Dictionary of :code:`<string>`.
+        :paramtype properties: dict[str, str]
+        :keyword parameters: Dictionary of :code:`<any>`.
+        :paramtype parameters: dict[str, any]
+        :keyword action_uris: Dictionary of :code:`<string>`.
+        :paramtype action_uris: dict[str, str]
+        :keyword script_name:
+        :paramtype script_name: str
+        :keyword target:
+        :paramtype target: str
+        :keyword unique_child_run_compute_targets:
+        :paramtype unique_child_run_compute_targets: list[str]
+        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
+        :paramtype tags: dict[str, str]
+        :keyword settings: Dictionary of :code:`<string>`.
+        :paramtype settings: dict[str, str]
+        :keyword services: Dictionary of :code:`<EndpointSetting>`.
+        :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.EndpointSetting]
+        :keyword input_datasets:
+        :paramtype input_datasets: list[~azure.mgmt.machinelearningservices.models.DatasetLineage]
+        :keyword output_datasets:
+        :paramtype output_datasets:
+         list[~azure.mgmt.machinelearningservices.models.OutputDatasetLineage]
+        :keyword run_definition: Anything.
+        :paramtype run_definition: any
+        :keyword job_specification: Anything.
+        :paramtype job_specification: any
+        :keyword primary_metric_name:
+        :paramtype primary_metric_name: str
+        :keyword created_from:
+        :paramtype created_from: ~azure.mgmt.machinelearningservices.models.CreatedFrom
+        :keyword cancel_uri:
+        :paramtype cancel_uri: str
+        :keyword complete_uri:
+        :paramtype complete_uri: str
+        :keyword diagnostics_uri:
+        :paramtype diagnostics_uri: str
+        :keyword compute_request:
+        :paramtype compute_request: ~azure.mgmt.machinelearningservices.models.ComputeRequest
+        :keyword compute:
+        :paramtype compute: ~azure.mgmt.machinelearningservices.models.Compute
+        :keyword retain_for_lifetime_of_workspace:
+        :paramtype retain_for_lifetime_of_workspace: bool
+        :keyword queueing_info:
+        :paramtype queueing_info: ~azure.mgmt.machinelearningservices.models.QueueingInfo
+        :keyword active_child_run_id: The RunId of the active child on a virtual run.
+        :paramtype active_child_run_id: str
+        :keyword inputs: Dictionary of :code:`<TypedAssetReference>`.
+        :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+        :keyword outputs: Dictionary of :code:`<TypedAssetReference>`.
+        :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+        """
+        super(CreateRun, self).__init__(**kwargs)
+        self.run_id = kwargs.get('run_id', None)
+        self.parent_run_id = kwargs.get('parent_run_id', None)
+        self.experiment_id = kwargs.get('experiment_id', None)
+        self.status = kwargs.get('status', None)
+        self.start_time_utc = kwargs.get('start_time_utc', None)
+        self.end_time_utc = kwargs.get('end_time_utc', None)
+        self.options = kwargs.get('options', None)
+        self.is_virtual = kwargs.get('is_virtual', None)
+        self.display_name = kwargs.get('display_name', None)
+        self.name = kwargs.get('name', None)
+        self.data_container_id = kwargs.get('data_container_id', None)
+        self.description = kwargs.get('description', None)
+        self.hidden = kwargs.get('hidden', None)
+        self.run_type = kwargs.get('run_type', None)
+        self.run_type_v2 = kwargs.get('run_type_v2', None)
+        self.properties = kwargs.get('properties', None)
+        self.parameters = kwargs.get('parameters', None)
+        self.action_uris = kwargs.get('action_uris', None)
+        self.script_name = kwargs.get('script_name', None)
+        self.target = kwargs.get('target', None)
+        self.unique_child_run_compute_targets = kwargs.get('unique_child_run_compute_targets', None)
+        self.tags = kwargs.get('tags', None)
+        self.settings = kwargs.get('settings', None)
+        self.services = kwargs.get('services', None)
+        self.input_datasets = kwargs.get('input_datasets', None)
+        self.output_datasets = kwargs.get('output_datasets', None)
+        self.run_definition = kwargs.get('run_definition', None)
+        self.job_specification = kwargs.get('job_specification', None)
+        self.primary_metric_name = kwargs.get('primary_metric_name', None)
+        self.created_from = kwargs.get('created_from', None)
+        self.cancel_uri = kwargs.get('cancel_uri', None)
+        self.complete_uri = kwargs.get('complete_uri', None)
+        self.diagnostics_uri = kwargs.get('diagnostics_uri', None)
+        self.compute_request = kwargs.get('compute_request', None)
+        self.compute = kwargs.get('compute', None)
+        self.retain_for_lifetime_of_workspace = kwargs.get('retain_for_lifetime_of_workspace', None)
+        self.queueing_info = kwargs.get('queueing_info', None)
+        self.active_child_run_id = kwargs.get('active_child_run_id', None)
+        self.inputs = kwargs.get('inputs', None)
+        self.outputs = kwargs.get('outputs', None)
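+
+
+# --- Editorial usage sketch (not part of the generated code) -----------------
+# A minimal, hedged CreateRun: every field is optional at construction time, the
+# run id constraint and the status-to-RunStatus mapping come from the docstring
+# above, and all literal values here are placeholders.
+def _example_create_run_batch():  # illustrative helper, never called by the client
+    run = CreateRun(
+        run_id="example_run_0001",       # < 256 chars, alphanumeric/dash/underscore
+        status="NotStarted",             # string form of a RunStatus value
+        display_name="Example run",
+        tags={"purpose": "illustration"},
+        created_from=CreatedFrom(type="Notebook"),  # "Notebook" is the only documented non-None type
+    )
+    return BatchAddOrModifyRunRequest(runs=[run])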
+
+
+class DatasetIdentifier(msrest.serialization.Model):
+    """DatasetIdentifier.
+
+    :ivar saved_id:
+    :vartype saved_id: str
+    :ivar registered_id:
+    :vartype registered_id: str
+    :ivar registered_version:
+    :vartype registered_version: str
+    """
+
+    _attribute_map = {
+        'saved_id': {'key': 'savedId', 'type': 'str'},
+        'registered_id': {'key': 'registeredId', 'type': 'str'},
+        'registered_version': {'key': 'registeredVersion', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword saved_id:
+        :paramtype saved_id: str
+        :keyword registered_id:
+        :paramtype registered_id: str
+        :keyword registered_version:
+        :paramtype registered_version: str
+        """
+        super(DatasetIdentifier, self).__init__(**kwargs)
+        self.saved_id = kwargs.get('saved_id', None)
+        self.registered_id = kwargs.get('registered_id', None)
+        self.registered_version = kwargs.get('registered_version', None)
+
+
+class DatasetInputDetails(msrest.serialization.Model):
+    """DatasetInputDetails.
+
+    :ivar input_name:
+    :vartype input_name: str
+    :ivar mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
+    :vartype mechanism: str or ~azure.mgmt.machinelearningservices.models.DatasetDeliveryMechanism
+    :ivar path_on_compute:
+    :vartype path_on_compute: str
+    """
+
+    _attribute_map = {
+        'input_name': {'key': 'inputName', 'type': 'str'},
+        'mechanism': {'key': 'mechanism', 'type': 'str'},
+        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword input_name:
+        :paramtype input_name: str
+        :keyword mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
+        :paramtype mechanism: str or
+         ~azure.mgmt.machinelearningservices.models.DatasetDeliveryMechanism
+        :keyword path_on_compute:
+        :paramtype path_on_compute: str
+        """
+        super(DatasetInputDetails, self).__init__(**kwargs)
+        self.input_name = kwargs.get('input_name', None)
+        self.mechanism = kwargs.get('mechanism', None)
+        self.path_on_compute = kwargs.get('path_on_compute', None)
+
+
+class DatasetLineage(msrest.serialization.Model):
+    """DatasetLineage.
+
+    :ivar identifier:
+    :vartype identifier: ~azure.mgmt.machinelearningservices.models.DatasetIdentifier
+    :ivar consumption_type: Possible values include: "RunInput", "Reference".
+    :vartype consumption_type: str or
+     ~azure.mgmt.machinelearningservices.models.DatasetConsumptionType
+    :ivar input_details:
+    :vartype input_details: ~azure.mgmt.machinelearningservices.models.DatasetInputDetails
+    """
+
+    _attribute_map = {
+        'identifier': {'key': 'identifier', 'type': 'DatasetIdentifier'},
+        'consumption_type': {'key': 'consumptionType', 'type': 'str'},
+        'input_details': {'key': 'inputDetails', 'type': 'DatasetInputDetails'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword identifier:
+        :paramtype identifier: ~azure.mgmt.machinelearningservices.models.DatasetIdentifier
+        :keyword consumption_type: Possible values include: "RunInput", "Reference".
+        :paramtype consumption_type: str or
+         ~azure.mgmt.machinelearningservices.models.DatasetConsumptionType
+        :keyword input_details:
+        :paramtype input_details: ~azure.mgmt.machinelearningservices.models.DatasetInputDetails
+        """
+        super(DatasetLineage, self).__init__(**kwargs)
+        self.identifier = kwargs.get('identifier', None)
+        self.consumption_type = kwargs.get('consumption_type', None)
+        self.input_details = kwargs.get('input_details', None)
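+
+
+# --- Editorial usage sketch (not part of the generated code) -----------------
+# Ties the three dataset models together: the identifier, how the dataset was
+# consumed, and the delivery details. The enum-like strings come from
+# DatasetConsumptionType / DatasetDeliveryMechanism; ids and paths are placeholders.
+def _example_dataset_lineage():  # illustrative helper, never called by the client
+    return DatasetLineage(
+        identifier=DatasetIdentifier(saved_id="00000000-0000-0000-0000-000000000000"),
+        consumption_type="RunInput",
+        input_details=DatasetInputDetails(
+            input_name="training_data",      # placeholder input name
+            mechanism="Mount",               # one of Direct/Mount/Download/Hdfs
+            path_on_compute="/mnt/data",     # placeholder mount path
+        ),
+    )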
+
+
+class DatasetOutputDetails(msrest.serialization.Model):
+    """DatasetOutputDetails.
+
+    :ivar output_name:
+    :vartype output_name: str
+    """
+
+    _attribute_map = {
+        'output_name': {'key': 'outputName', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword output_name:
+        :paramtype output_name: str
+        """
+        super(DatasetOutputDetails, self).__init__(**kwargs)
+        self.output_name = kwargs.get('output_name', None)
+
+
+class DeleteConfiguration(msrest.serialization.Model):
+    """DeleteConfiguration.
+
+    :ivar workspace_id:
+    :vartype workspace_id: str
+    :ivar is_enabled:
+    :vartype is_enabled: bool
+    :ivar cutoff_days:
+    :vartype cutoff_days: int
+    """
+
+    _attribute_map = {
+        'workspace_id': {'key': 'workspaceId', 'type': 'str'},
+        'is_enabled': {'key': 'isEnabled', 'type': 'bool'},
+        'cutoff_days': {'key': 'cutoffDays', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword workspace_id:
+        :paramtype workspace_id: str
+        :keyword is_enabled:
+        :paramtype is_enabled: bool
+        :keyword cutoff_days:
+        :paramtype cutoff_days: int
+        """
+        super(DeleteConfiguration, self).__init__(**kwargs)
+        self.workspace_id = kwargs.get('workspace_id', None)
+        self.is_enabled = kwargs.get('is_enabled', None)
+        self.cutoff_days = kwargs.get('cutoff_days', None)
+
+
+class DeleteExperimentTagsResult(msrest.serialization.Model):
+    """DeleteExperimentTagsResult.
+
+    :ivar errors: Dictionary of :code:`<ErrorResponse>`.
+    :vartype errors: dict[str, ~azure.mgmt.machinelearningservices.models.ErrorResponse]
+    """
+
+    _attribute_map = {
+        'errors': {'key': 'errors', 'type': '{ErrorResponse}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword errors: Dictionary of :code:`<ErrorResponse>`.
+        :paramtype errors: dict[str, ~azure.mgmt.machinelearningservices.models.ErrorResponse]
+        """
+        super(DeleteExperimentTagsResult, self).__init__(**kwargs)
+        self.errors = kwargs.get('errors', None)
+
+
+class DeleteOrModifyTags(msrest.serialization.Model):
+    """The Tags to modify or delete.
+
+    :ivar tags_to_modify: The KV pairs of tags to modify.
+    :vartype tags_to_modify: dict[str, str]
+    :ivar tags_to_delete: The list of tags to delete.
+    :vartype tags_to_delete: list[str]
+    """
+
+    _attribute_map = {
+        'tags_to_modify': {'key': 'tagsToModify', 'type': '{str}'},
+        'tags_to_delete': {'key': 'tagsToDelete', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword tags_to_modify: The KV pairs of tags to modify.
+        :paramtype tags_to_modify: dict[str, str]
+        :keyword tags_to_delete: The list of tags to delete.
+        :paramtype tags_to_delete: list[str]
+        """
+        super(DeleteOrModifyTags, self).__init__(**kwargs)
+        self.tags_to_modify = kwargs.get('tags_to_modify', None)
+        self.tags_to_delete = kwargs.get('tags_to_delete', None)
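+
+
+# --- Editorial usage sketch (not part of the generated code) -----------------
+# DeleteOrModifyTags separates upserts from deletions; a hedged example with
+# placeholder tag names:
+def _example_delete_or_modify_tags():  # illustrative helper, never called by the client
+    return DeleteOrModifyTags(
+        tags_to_modify={"stage": "archived"},  # tags to add or update
+        tags_to_delete=["temporary"],          # tag keys to remove
+    )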
+
+
+class DeleteRunServices(msrest.serialization.Model):
+    """The Services to delete.
+
+    :ivar services_to_delete: The list of Services to delete.
+    :vartype services_to_delete: list[str]
+    """
+
+    _attribute_map = {
+        'services_to_delete': {'key': 'servicesToDelete', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword services_to_delete: The list of Services to delete.
+        :paramtype services_to_delete: list[str]
+        """
+        super(DeleteRunServices, self).__init__(**kwargs)
+        self.services_to_delete = kwargs.get('services_to_delete', None)
+
+
+class DeleteTagsCommand(msrest.serialization.Model):
+    """DeleteTagsCommand.
+
+    :ivar tags: A set of tags.
+    :vartype tags: list[str]
+    """
+
+    _attribute_map = {
+        'tags': {'key': 'tags', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword tags: A set of tags.
+        :paramtype tags: list[str]
+        """
+        super(DeleteTagsCommand, self).__init__(**kwargs)
+        self.tags = kwargs.get('tags', None)
+
+
+class DerivedMetricKey(msrest.serialization.Model):
+    """DerivedMetricKey.
+
+    :ivar namespace:
+    :vartype namespace: str
+    :ivar name:
+    :vartype name: str
+    :ivar labels:
+    :vartype labels: list[str]
+    :ivar column_names:
+    :vartype column_names: list[str]
+    """
+
+    _validation = {
+        'labels': {'unique': True},
+        'column_names': {'unique': True},
+    }
+
+    _attribute_map = {
+        'namespace': {'key': 'namespace', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'labels': {'key': 'labels', 'type': '[str]'},
+        'column_names': {'key': 'columnNames', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword namespace:
+        :paramtype namespace: str
+        :keyword name:
+        :paramtype name: str
+        :keyword labels:
+        :paramtype labels: list[str]
+        :keyword column_names:
+        :paramtype column_names: list[str]
+        """
+        super(DerivedMetricKey, self).__init__(**kwargs)
+        self.namespace = kwargs.get('namespace', None)
+        self.name = kwargs.get('name', None)
+        self.labels = kwargs.get('labels', None)
+        self.column_names = kwargs.get('column_names', None)
+
+
+class EndpointSetting(msrest.serialization.Model):
+    """EndpointSetting.
+
+    :ivar type:
+    :vartype type: str
+    :ivar port:
+    :vartype port: int
+    :ivar ssl_thumbprint:
+    :vartype ssl_thumbprint: str
+    :ivar endpoint:
+    :vartype endpoint: str
+    :ivar proxy_endpoint:
+    :vartype proxy_endpoint: str
+    :ivar status:
+    :vartype status: str
+    :ivar error_message:
+    :vartype error_message: str
+    :ivar enabled:
+    :vartype enabled: bool
+    :ivar properties: Dictionary of :code:`<string>`.
+    :vartype properties: dict[str, str]
+    """
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'port': {'key': 'port', 'type': 'int'},
+        'ssl_thumbprint': {'key': 'sslThumbprint', 'type': 'str'},
+        'endpoint': {'key': 'endpoint', 'type': 'str'},
+        'proxy_endpoint': {'key': 'proxyEndpoint', 'type': 'str'},
+        'status': {'key': 'status', 'type': 'str'},
+        'error_message': {'key': 'errorMessage', 'type': 'str'},
+        'enabled': {'key': 'enabled', 'type': 'bool'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword type:
+        :paramtype type: str
+        :keyword port:
+        :paramtype port: int
+        :keyword ssl_thumbprint:
+        :paramtype ssl_thumbprint: str
+        :keyword endpoint:
+        :paramtype endpoint: str
+        :keyword proxy_endpoint:
+        :paramtype proxy_endpoint: str
+        :keyword status:
+        :paramtype status: str
+        :keyword error_message:
+        :paramtype error_message: str
+        :keyword enabled:
+        :paramtype enabled: bool
+        :keyword properties: Dictionary of :code:`<string>`.
+        :paramtype properties: dict[str, str]
+        """
+        super(EndpointSetting, self).__init__(**kwargs)
+        self.type = kwargs.get('type', None)
+        self.port = kwargs.get('port', None)
+        self.ssl_thumbprint = kwargs.get('ssl_thumbprint', None)
+        self.endpoint = kwargs.get('endpoint', None)
+        self.proxy_endpoint = kwargs.get('proxy_endpoint', None)
+        self.status = kwargs.get('status', None)
+        self.error_message = kwargs.get('error_message', None)
+        self.enabled = kwargs.get('enabled', None)
+        self.properties = kwargs.get('properties', None)
+
+
+class ErrorAdditionalInfo(msrest.serialization.Model):
+    """The resource management error additional info.
+
+    :ivar type: The additional info type.
+    :vartype type: str
+    :ivar info: The additional info.
+    :vartype info: any
+    """
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'info': {'key': 'info', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword type: The additional info type.
+        :paramtype type: str
+        :keyword info: The additional info.
+        :paramtype info: any
+        """
+        super(ErrorAdditionalInfo, self).__init__(**kwargs)
+        self.type = kwargs.get('type', None)
+        self.info = kwargs.get('info', None)
+
+
+class ErrorResponse(msrest.serialization.Model):
+    """The error response.
+
+    :ivar error: The root error.
+    :vartype error: ~azure.mgmt.machinelearningservices.models.RootError
+    :ivar correlation: Dictionary containing correlation details for the error.
+    :vartype correlation: dict[str, str]
+    :ivar environment: The hosting environment.
+    :vartype environment: str
+    :ivar location: The Azure region.
+    :vartype location: str
+    :ivar time: The time in UTC.
+    :vartype time: ~datetime.datetime
+    :ivar component_name: Name of the component where the error originated or was encountered.
+    :vartype component_name: str
+    """
+
+    _attribute_map = {
+        'error': {'key': 'error', 'type': 'RootError'},
+        'correlation': {'key': 'correlation', 'type': '{str}'},
+        'environment': {'key': 'environment', 'type': 'str'},
+        'location': {'key': 'location', 'type': 'str'},
+        'time': {'key': 'time', 'type': 'iso-8601'},
+        'component_name': {'key': 'componentName', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword error: The root error.
+        :paramtype error: ~azure.mgmt.machinelearningservices.models.RootError
+        :keyword correlation: Dictionary containing correlation details for the error.
+        :paramtype correlation: dict[str, str]
+        :keyword environment: The hosting environment.
+        :paramtype environment: str
+        :keyword location: The Azure region.
+        :paramtype location: str
+        :keyword time: The time in UTC.
+        :paramtype time: ~datetime.datetime
+        :keyword component_name: Name of the component where the error originated or was encountered.
+        :paramtype component_name: str
+        """
+        super(ErrorResponse, self).__init__(**kwargs)
+        self.error = kwargs.get('error', None)
+        self.correlation = kwargs.get('correlation', None)
+        self.environment = kwargs.get('environment', None)
+        self.location = kwargs.get('location', None)
+        self.time = kwargs.get('time', None)
+        self.component_name = kwargs.get('component_name', None)
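+
+
+# --- Editorial usage sketch (not part of the generated code) -----------------
+# ErrorResponse is the error envelope used throughout these models (see the
+# ``errors`` dictionaries above). A hedged sketch of reading a deserialized
+# payload; the wire keys follow ``_attribute_map`` and the values are made up.
+def _example_read_error_response():  # illustrative helper, never called by the client
+    payload = {
+        "correlation": {"operation": "00000000000000000000000000000000"},
+        "environment": "example",
+        "location": "example-region",
+        "componentName": "run-history",
+    }
+    response = ErrorResponse.deserialize(payload)  # classmethod from msrest Model (assumed available)
+    return response.component_name, response.correlation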
+
+
+class Event(msrest.serialization.Model):
+    """Event.
+
+    :ivar name: Gets the event name.
+    :vartype name: str
+    :ivar timestamp: Gets the event timestamp.
+    :vartype timestamp: ~datetime.datetime
+    :ivar attributes: Gets the dictionary of attributes associated with the event.
+    :vartype attributes: dict[str, any]
+    """
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
+        'attributes': {'key': 'attributes', 'type': '{object}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword name: Gets the Microsoft.MachineLearning.RunHistory.Contracts.Event name.
+        :paramtype name: str
+        :keyword timestamp: Gets the Microsoft.MachineLearning.RunHistory.Contracts.Event timestamp.
+        :paramtype timestamp: ~datetime.datetime
+        :keyword attributes: Gets the System.Collections.Generic.IDictionary`2 collection of attributes
+         associated with the event.
+        :paramtype attributes: dict[str, any]
+        """
+        super(Event, self).__init__(**kwargs)
+        self.name = kwargs.get('name', None)
+        self.timestamp = kwargs.get('timestamp', None)
+        self.attributes = kwargs.get('attributes', None)
+
+
+class Experiment(msrest.serialization.Model):
+    """Experiment.
+
+    :ivar experiment_id:
+    :vartype experiment_id: str
+    :ivar name:
+    :vartype name: str
+    :ivar description:
+    :vartype description: str
+    :ivar created_utc:
+    :vartype created_utc: ~datetime.datetime
+    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
+    :vartype tags: dict[str, str]
+    :ivar archived_time:
+    :vartype archived_time: ~datetime.datetime
+    :ivar retain_for_lifetime_of_workspace:
+    :vartype retain_for_lifetime_of_workspace: bool
+    :ivar artifact_location:
+    :vartype artifact_location: str
+    """
+
+    _attribute_map = {
+        'experiment_id': {'key': 'experimentId', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'archived_time': {'key': 'archivedTime', 'type': 'iso-8601'},
+        'retain_for_lifetime_of_workspace': {'key': 'retainForLifetimeOfWorkspace', 'type': 'bool'},
+        'artifact_location': {'key': 'artifactLocation', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword experiment_id:
+        :paramtype experiment_id: str
+        :keyword name:
+        :paramtype name: str
+        :keyword description:
+        :paramtype description: str
+        :keyword created_utc:
+        :paramtype created_utc: ~datetime.datetime
+        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
+        :paramtype tags: dict[str, str]
+        :keyword archived_time:
+        :paramtype archived_time: ~datetime.datetime
+        :keyword retain_for_lifetime_of_workspace:
+        :paramtype retain_for_lifetime_of_workspace: bool
+        :keyword artifact_location:
+        :paramtype artifact_location: str
+        """
+        super(Experiment, self).__init__(**kwargs)
+        self.experiment_id = kwargs.get('experiment_id', None)
+        self.name = kwargs.get('name', None)
+        self.description = kwargs.get('description', None)
+        self.created_utc = kwargs.get('created_utc', None)
+        self.tags = kwargs.get('tags', None)
+        self.archived_time = kwargs.get('archived_time', None)
+        self.retain_for_lifetime_of_workspace = kwargs.get('retain_for_lifetime_of_workspace', None)
+        self.artifact_location = kwargs.get('artifact_location', None)
+
+
+class ExperimentQueryParams(msrest.serialization.Model):
+    """Extends Query Params DTO for ViewType.
+
+    :ivar view_type: ViewType filters experiments by their archived state. Default is ActiveOnly.
+     Possible values include: "Default", "All", "ActiveOnly", "ArchivedOnly".
+    :vartype view_type: str or ~azure.mgmt.machinelearningservices.models.ExperimentViewType
+    :ivar filter: Allows for filtering the collection of resources.
+     The expression specified is evaluated for each resource in the collection, and only items
+     where the expression evaluates to true are included in the response.
+     See https://learn.microsoft.com/azure/search/query-odata-filter-orderby-syntax for
+     details on the expression syntax.
+    :vartype filter: str
+    :ivar continuation_token: The continuation token to use for getting the next set of resources.
+    :vartype continuation_token: str
+    :ivar order_by: The comma-separated list of resource properties to use for sorting the
+     requested resources.
+     Optionally, can be followed by either 'asc' or 'desc'.
+    :vartype order_by: str
+    :ivar top: The maximum number of items in the resource collection to be included in the result.
+     If not specified, all items are returned.
+    :vartype top: int
+    """
+
+    _attribute_map = {
+        'view_type': {'key': 'viewType', 'type': 'str'},
+        'filter': {'key': 'filter', 'type': 'str'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'order_by': {'key': 'orderBy', 'type': 'str'},
+        'top': {'key': 'top', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword view_type: ViewType filters experiments by their archived state. Default is
+         ActiveOnly. Possible values include: "Default", "All", "ActiveOnly", "ArchivedOnly".
+        :paramtype view_type: str or ~azure.mgmt.machinelearningservices.models.ExperimentViewType
+        :keyword filter: Allows for filtering the collection of resources.
+         The expression specified is evaluated for each resource in the collection, and only items
+         where the expression evaluates to true are included in the response.
+         See https://learn.microsoft.com/azure/search/query-odata-filter-orderby-syntax for
+         details on the expression syntax.
+        :paramtype filter: str
+        :keyword continuation_token: The continuation token to use for getting the next set of
+         resources.
+        :paramtype continuation_token: str
+        :keyword order_by: The comma-separated list of resource properties to use for sorting the
+         requested resources.
+         Optionally, can be followed by either 'asc' or 'desc'.
+        :paramtype order_by: str
+        :keyword top: The maximum number of items in the resource collection to be included in the
+         result.
+         If not specified, all items are returned.
+        :paramtype top: int
+        """
+        super(ExperimentQueryParams, self).__init__(**kwargs)
+        self.view_type = kwargs.get('view_type', None)
+        self.filter = kwargs.get('filter', None)
+        self.continuation_token = kwargs.get('continuation_token', None)
+        self.order_by = kwargs.get('order_by', None)
+        self.top = kwargs.get('top', None)
+
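+# Illustrative sketch (an assumption, not generated code): ExperimentQueryParams
+# is a plain kwargs model; the filter and order_by strings below are made-up
+# examples of the OData-style syntax the docstring links to, while "ActiveOnly"
+# is one of the documented view_type values.
+#
+#   params = ExperimentQueryParams(
+#       view_type="ActiveOnly",
+#       filter="name eq 'my-experiment'",
+#       order_by="createdUtc desc",
+#       top=50,
+#   )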
+
+class GetRunDataRequest(msrest.serialization.Model):
+    """GetRunDataRequest.
+
+    :ivar run_id:
+    :vartype run_id: str
+    :ivar select_run_metadata:
+    :vartype select_run_metadata: bool
+    :ivar select_run_definition:
+    :vartype select_run_definition: bool
+    :ivar select_job_specification:
+    :vartype select_job_specification: bool
+    """
+
+    _attribute_map = {
+        'run_id': {'key': 'runId', 'type': 'str'},
+        'select_run_metadata': {'key': 'selectRunMetadata', 'type': 'bool'},
+        'select_run_definition': {'key': 'selectRunDefinition', 'type': 'bool'},
+        'select_job_specification': {'key': 'selectJobSpecification', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword run_id:
+        :paramtype run_id: str
+        :keyword select_run_metadata:
+        :paramtype select_run_metadata: bool
+        :keyword select_run_definition:
+        :paramtype select_run_definition: bool
+        :keyword select_job_specification:
+        :paramtype select_job_specification: bool
+        """
+        super(GetRunDataRequest, self).__init__(**kwargs)
+        self.run_id = kwargs.get('run_id', None)
+        self.select_run_metadata = kwargs.get('select_run_metadata', None)
+        self.select_run_definition = kwargs.get('select_run_definition', None)
+        self.select_job_specification = kwargs.get('select_job_specification', None)
+
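+# Illustrative sketch: by their names, the select_* flags appear to control
+# which parts of the corresponding GetRunDataResult are populated (this is an
+# inference from the docstrings, not a documented guarantee). The run id is a
+# placeholder.
+#
+#   request = GetRunDataRequest(
+#       run_id="example-run-id",
+#       select_run_metadata=True,
+#       select_run_definition=False,
+#       select_job_specification=False,
+#   )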
+
+class GetRunDataResult(msrest.serialization.Model):
+    """GetRunDataResult.
+
+    :ivar run_metadata: The definition of a Run.
+    :vartype run_metadata: ~azure.mgmt.machinelearningservices.models.Run
+    :ivar run_definition: Anything.
+    :vartype run_definition: any
+    :ivar job_specification: Anything.
+    :vartype job_specification: any
+    """
+
+    _attribute_map = {
+        'run_metadata': {'key': 'runMetadata', 'type': 'Run'},
+        'run_definition': {'key': 'runDefinition', 'type': 'object'},
+        'job_specification': {'key': 'jobSpecification', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword run_metadata: The definition of a Run.
+        :paramtype run_metadata: ~azure.mgmt.machinelearningservices.models.Run
+        :keyword run_definition: Anything.
+        :paramtype run_definition: any
+        :keyword job_specification: Anything.
+        :paramtype job_specification: any
+        """
+        super(GetRunDataResult, self).__init__(**kwargs)
+        self.run_metadata = kwargs.get('run_metadata', None)
+        self.run_definition = kwargs.get('run_definition', None)
+        self.job_specification = kwargs.get('job_specification', None)
+
+
+class GetRunsByIds(msrest.serialization.Model):
+    """GetRunsByIds.
+
+    :ivar run_ids:
+    :vartype run_ids: list[str]
+    """
+
+    _attribute_map = {
+        'run_ids': {'key': 'runIds', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword run_ids:
+        :paramtype run_ids: list[str]
+        """
+        super(GetRunsByIds, self).__init__(**kwargs)
+        self.run_ids = kwargs.get('run_ids', None)
+
+
+class GetSampledMetricRequest(msrest.serialization.Model):
+    """GetSampledMetricRequest.
+
+    :ivar metric_name:
+    :vartype metric_name: str
+    :ivar metric_namespace:
+    :vartype metric_namespace: str
+    """
+
+    _attribute_map = {
+        'metric_name': {'key': 'metricName', 'type': 'str'},
+        'metric_namespace': {'key': 'metricNamespace', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword metric_name:
+        :paramtype metric_name: str
+        :keyword metric_namespace:
+        :paramtype metric_namespace: str
+        """
+        super(GetSampledMetricRequest, self).__init__(**kwargs)
+        self.metric_name = kwargs.get('metric_name', None)
+        self.metric_namespace = kwargs.get('metric_namespace', None)
+
+
+class IMetricV2(msrest.serialization.Model):
+    """Sequence of one or many values sharing a common  DataContainerId, Name, and Schema. Used only for Post Metrics.
+
+    :ivar data_container_id: Data container to which this Metric belongs.
+    :vartype data_container_id: str
+    :ivar name: Name identifying this Metric within the Data Container.
+    :vartype name: str
+    :ivar columns: Schema shared by all values under this Metric
+     Columns.Keys define the column names which are required for each MetricValue
+     Columns.Values define the type of the associated object for each column.
+    :vartype columns: dict[str, str or ~azure.mgmt.machinelearningservices.models.MetricValueType]
+    :ivar namespace: Namespace for this Metric.
+    :vartype namespace: str
+    :ivar standard_schema_id:
+    :vartype standard_schema_id: str
+    :ivar value: The list of values.
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.MetricV2Value]
+    """
+
+    _attribute_map = {
+        'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'columns': {'key': 'columns', 'type': '{str}'},
+        'namespace': {'key': 'namespace', 'type': 'str'},
+        'standard_schema_id': {'key': 'standardSchemaId', 'type': 'str'},
+        'value': {'key': 'value', 'type': '[MetricV2Value]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword data_container_id: Data container to which this Metric belongs.
+        :paramtype data_container_id: str
+        :keyword name: Name identifying this Metric within the Data Container.
+        :paramtype name: str
+        :keyword columns: Schema shared by all values under this Metric
+         Columns.Keys define the column names which are required for each MetricValue
+         Columns.Values define the type of the associated object for each column.
+        :paramtype columns: dict[str, str or
+         ~azure.mgmt.machinelearningservices.models.MetricValueType]
+        :keyword namespace: Namespace for this Metric.
+        :paramtype namespace: str
+        :keyword standard_schema_id:
+        :paramtype standard_schema_id: str
+        :keyword value: The list of values.
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.MetricV2Value]
+        """
+        super(IMetricV2, self).__init__(**kwargs)
+        self.data_container_id = kwargs.get('data_container_id', None)
+        self.name = kwargs.get('name', None)
+        self.columns = kwargs.get('columns', None)
+        self.namespace = kwargs.get('namespace', None)
+        self.standard_schema_id = kwargs.get('standard_schema_id', None)
+        self.value = kwargs.get('value', None)
+
+
+class InnerErrorResponse(msrest.serialization.Model):
+    """A nested structure of errors.
+
+    :ivar code: The error code.
+    :vartype code: str
+    :ivar inner_error: A nested structure of errors.
+    :vartype inner_error: ~azure.mgmt.machinelearningservices.models.InnerErrorResponse
+    """
+
+    _attribute_map = {
+        'code': {'key': 'code', 'type': 'str'},
+        'inner_error': {'key': 'innerError', 'type': 'InnerErrorResponse'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword code: The error code.
+        :paramtype code: str
+        :keyword inner_error: A nested structure of errors.
+        :paramtype inner_error: ~azure.mgmt.machinelearningservices.models.InnerErrorResponse
+        """
+        super(InnerErrorResponse, self).__init__(**kwargs)
+        self.code = kwargs.get('code', None)
+        self.inner_error = kwargs.get('inner_error', None)
+
+
+class JobCost(msrest.serialization.Model):
+    """JobCost.
+
+    :ivar charged_cpu_core_seconds:
+    :vartype charged_cpu_core_seconds: float
+    :ivar charged_cpu_memory_megabyte_seconds:
+    :vartype charged_cpu_memory_megabyte_seconds: float
+    :ivar charged_gpu_seconds:
+    :vartype charged_gpu_seconds: float
+    :ivar charged_node_utilization_seconds:
+    :vartype charged_node_utilization_seconds: float
+    """
+
+    _attribute_map = {
+        'charged_cpu_core_seconds': {'key': 'chargedCpuCoreSeconds', 'type': 'float'},
+        'charged_cpu_memory_megabyte_seconds': {'key': 'chargedCpuMemoryMegabyteSeconds', 'type': 'float'},
+        'charged_gpu_seconds': {'key': 'chargedGpuSeconds', 'type': 'float'},
+        'charged_node_utilization_seconds': {'key': 'chargedNodeUtilizationSeconds', 'type': 'float'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword charged_cpu_core_seconds:
+        :paramtype charged_cpu_core_seconds: float
+        :keyword charged_cpu_memory_megabyte_seconds:
+        :paramtype charged_cpu_memory_megabyte_seconds: float
+        :keyword charged_gpu_seconds:
+        :paramtype charged_gpu_seconds: float
+        :keyword charged_node_utilization_seconds:
+        :paramtype charged_node_utilization_seconds: float
+        """
+        super(JobCost, self).__init__(**kwargs)
+        self.charged_cpu_core_seconds = kwargs.get('charged_cpu_core_seconds', None)
+        self.charged_cpu_memory_megabyte_seconds = kwargs.get('charged_cpu_memory_megabyte_seconds', None)
+        self.charged_gpu_seconds = kwargs.get('charged_gpu_seconds', None)
+        self.charged_node_utilization_seconds = kwargs.get('charged_node_utilization_seconds', None)
+
+
+class KeyValuePairBaseEventErrorResponse(msrest.serialization.Model):
+    """KeyValuePairBaseEventErrorResponse.
+
+    :ivar key: Base event is the envelope used to post event data to the Event controller.
+    :vartype key: ~azure.mgmt.machinelearningservices.models.BaseEvent
+    :ivar value: The error response.
+    :vartype value: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+    """
+
+    _attribute_map = {
+        'key': {'key': 'key', 'type': 'BaseEvent'},
+        'value': {'key': 'value', 'type': 'ErrorResponse'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword key: Base event is the envelope used to post event data to the Event controller.
+        :paramtype key: ~azure.mgmt.machinelearningservices.models.BaseEvent
+        :keyword value: The error response.
+        :paramtype value: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+        """
+        super(KeyValuePairBaseEventErrorResponse, self).__init__(**kwargs)
+        self.key = kwargs.get('key', None)
+        self.value = kwargs.get('value', None)
+
+
+class KeyValuePairString(msrest.serialization.Model):
+    """KeyValuePairString.
+
+    :ivar key:
+    :vartype key: str
+    :ivar value:
+    :vartype value: str
+    """
+
+    _attribute_map = {
+        'key': {'key': 'key', 'type': 'str'},
+        'value': {'key': 'value', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword key:
+        :paramtype key: str
+        :keyword value:
+        :paramtype value: str
+        """
+        super(KeyValuePairString, self).__init__(**kwargs)
+        self.key = kwargs.get('key', None)
+        self.value = kwargs.get('value', None)
+
+
+class KeyValuePairStringJToken(msrest.serialization.Model):
+    """KeyValuePairStringJToken.
+
+    :ivar key:
+    :vartype key: str
+    :ivar value: Anything.
+    :vartype value: any
+    """
+
+    _attribute_map = {
+        'key': {'key': 'key', 'type': 'str'},
+        'value': {'key': 'value', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword key:
+        :paramtype key: str
+        :keyword value: Anything.
+        :paramtype value: any
+        """
+        super(KeyValuePairStringJToken, self).__init__(**kwargs)
+        self.key = kwargs.get('key', None)
+        self.value = kwargs.get('value', None)
+
+
+class Link(msrest.serialization.Model):
+    """Link.
+
+    :ivar context:
+    :vartype context: ~azure.mgmt.machinelearningservices.models.SpanContext
+    :ivar attributes: Gets the collection of attributes associated with the link.
+    :vartype attributes: dict[str, any]
+    """
+
+    _attribute_map = {
+        'context': {'key': 'context', 'type': 'SpanContext'},
+        'attributes': {'key': 'attributes', 'type': '{object}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword context:
+        :paramtype context: ~azure.mgmt.machinelearningservices.models.SpanContext
+        :keyword attributes: Gets the collection of attributes associated with the link.
+        :paramtype attributes: dict[str, any]
+        """
+        super(Link, self).__init__(**kwargs)
+        self.context = kwargs.get('context', None)
+        self.attributes = kwargs.get('attributes', None)
+
+
+class ListGenericResourceMetrics(msrest.serialization.Model):
+    """ListGenericResourceMetrics.
+
+    :ivar resource_id:
+    :vartype resource_id: str
+    :ivar metric_names:
+    :vartype metric_names: list[str]
+    :ivar label_filters: Dictionary of :code:`<string>`.
+    :vartype label_filters: dict[str, str]
+    :ivar metric_namespace:
+    :vartype metric_namespace: str
+    :ivar continuation_token:
+    :vartype continuation_token: str
+    """
+
+    _attribute_map = {
+        'resource_id': {'key': 'resourceId', 'type': 'str'},
+        'metric_names': {'key': 'metricNames', 'type': '[str]'},
+        'label_filters': {'key': 'labelFilters', 'type': '{str}'},
+        'metric_namespace': {'key': 'metricNamespace', 'type': 'str'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword resource_id:
+        :paramtype resource_id: str
+        :keyword metric_names:
+        :paramtype metric_names: list[str]
+        :keyword label_filters: Dictionary of :code:`<string>`.
+        :paramtype label_filters: dict[str, str]
+        :keyword metric_namespace:
+        :paramtype metric_namespace: str
+        :keyword continuation_token:
+        :paramtype continuation_token: str
+        """
+        super(ListGenericResourceMetrics, self).__init__(**kwargs)
+        self.resource_id = kwargs.get('resource_id', None)
+        self.metric_names = kwargs.get('metric_names', None)
+        self.label_filters = kwargs.get('label_filters', None)
+        self.metric_namespace = kwargs.get('metric_namespace', None)
+        self.continuation_token = kwargs.get('continuation_token', None)
+
+
+class ListMetrics(msrest.serialization.Model):
+    """ListMetrics.
+
+    :ivar metric_namespace:
+    :vartype metric_namespace: str
+    :ivar continuation_token:
+    :vartype continuation_token: str
+    """
+
+    _attribute_map = {
+        'metric_namespace': {'key': 'metricNamespace', 'type': 'str'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword metric_namespace:
+        :paramtype metric_namespace: str
+        :keyword continuation_token:
+        :paramtype continuation_token: str
+        """
+        super(ListMetrics, self).__init__(**kwargs)
+        self.metric_namespace = kwargs.get('metric_namespace', None)
+        self.continuation_token = kwargs.get('continuation_token', None)
+
+
+class MetricDefinition(msrest.serialization.Model):
+    """MetricDefinition.
+
+    :ivar metric_key:
+    :vartype metric_key: ~azure.mgmt.machinelearningservices.models.DerivedMetricKey
+    :ivar columns: Dictionary of :code:`<MetricValueType>`.
+    :vartype columns: dict[str, str or ~azure.mgmt.machinelearningservices.models.MetricValueType]
+    :ivar properties:
+    :vartype properties: ~azure.mgmt.machinelearningservices.models.MetricProperties
+    """
+
+    _attribute_map = {
+        'metric_key': {'key': 'metricKey', 'type': 'DerivedMetricKey'},
+        'columns': {'key': 'columns', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': 'MetricProperties'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword metric_key:
+        :paramtype metric_key: ~azure.mgmt.machinelearningservices.models.DerivedMetricKey
+        :keyword columns: Dictionary of :code:`<MetricValueType>`.
+        :paramtype columns: dict[str, str or
+         ~azure.mgmt.machinelearningservices.models.MetricValueType]
+        :keyword properties:
+        :paramtype properties: ~azure.mgmt.machinelearningservices.models.MetricProperties
+        """
+        super(MetricDefinition, self).__init__(**kwargs)
+        self.metric_key = kwargs.get('metric_key', None)
+        self.columns = kwargs.get('columns', None)
+        self.properties = kwargs.get('properties', None)
+
+
+class MetricProperties(msrest.serialization.Model):
+    """MetricProperties.
+
+    :ivar ux_metric_type: String value the UX uses to decide how to render your metrics,
+     e.g. azureml.v1.scalar or azureml.v1.table.
+    :vartype ux_metric_type: str
+    """
+
+    _attribute_map = {
+        'ux_metric_type': {'key': 'uxMetricType', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword ux_metric_type: String value the UX uses to decide how to render your metrics,
+         e.g. azureml.v1.scalar or azureml.v1.table.
+        :paramtype ux_metric_type: str
+        """
+        super(MetricProperties, self).__init__(**kwargs)
+        self.ux_metric_type = kwargs.get('ux_metric_type', None)
+
+
+class MetricSample(msrest.serialization.Model):
+    """MetricSample.
+
+    :ivar derived_label_values: Dictionary of :code:`<string>`.
+    :vartype derived_label_values: dict[str, str]
+    :ivar is_partial_result:
+    :vartype is_partial_result: bool
+    :ivar num_values_logged:
+    :vartype num_values_logged: long
+    :ivar data_container_id: Data container to which this Metric belongs.
+    :vartype data_container_id: str
+    :ivar name: Name identifying this Metric within the Data Container.
+    :vartype name: str
+    :ivar columns: Schema shared by all values under this Metric
+     Columns.Keys define the column names which are required for each MetricValue
+     Columns.Values define the type of the associated object for each column.
+    :vartype columns: dict[str, str or ~azure.mgmt.machinelearningservices.models.MetricValueType]
+    :ivar properties:
+    :vartype properties: ~azure.mgmt.machinelearningservices.models.MetricProperties
+    :ivar namespace: Namespace for this Metric.
+    :vartype namespace: str
+    :ivar standard_schema_id:
+    :vartype standard_schema_id: str
+    :ivar value:
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.MetricV2Value]
+    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
+     additional pages.
+    :vartype continuation_token: str
+    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
+     there are no additional pages.
+    :vartype next_link: str
+    """
+
+    _attribute_map = {
+        'derived_label_values': {'key': 'derivedLabelValues', 'type': '{str}'},
+        'is_partial_result': {'key': 'isPartialResult', 'type': 'bool'},
+        'num_values_logged': {'key': 'numValuesLogged', 'type': 'long'},
+        'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'columns': {'key': 'columns', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': 'MetricProperties'},
+        'namespace': {'key': 'namespace', 'type': 'str'},
+        'standard_schema_id': {'key': 'standardSchemaId', 'type': 'str'},
+        'value': {'key': 'value', 'type': '[MetricV2Value]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword derived_label_values: Dictionary of :code:`<string>`.
+        :paramtype derived_label_values: dict[str, str]
+        :keyword is_partial_result:
+        :paramtype is_partial_result: bool
+        :keyword num_values_logged:
+        :paramtype num_values_logged: long
+        :keyword data_container_id: Data container to which this Metric belongs.
+        :paramtype data_container_id: str
+        :keyword name: Name identifying this Metric within the Data Container.
+        :paramtype name: str
+        :keyword columns: Schema shared by all values under this Metric
+         Columns.Keys define the column names which are required for each MetricValue
+         Columns.Values define the type of the associated object for each column.
+        :paramtype columns: dict[str, str or
+         ~azure.mgmt.machinelearningservices.models.MetricValueType]
+        :keyword properties:
+        :paramtype properties: ~azure.mgmt.machinelearningservices.models.MetricProperties
+        :keyword namespace: Namespace for this Metric.
+        :paramtype namespace: str
+        :keyword standard_schema_id:
+        :paramtype standard_schema_id: str
+        :keyword value:
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.MetricV2Value]
+        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
+         additional pages.
+        :paramtype continuation_token: str
+        :keyword next_link: The link to the next page constructed using the continuationToken.  If
+         null, there are no additional pages.
+        :paramtype next_link: str
+        """
+        super(MetricSample, self).__init__(**kwargs)
+        self.derived_label_values = kwargs.get('derived_label_values', None)
+        self.is_partial_result = kwargs.get('is_partial_result', None)
+        self.num_values_logged = kwargs.get('num_values_logged', None)
+        self.data_container_id = kwargs.get('data_container_id', None)
+        self.name = kwargs.get('name', None)
+        self.columns = kwargs.get('columns', None)
+        self.properties = kwargs.get('properties', None)
+        self.namespace = kwargs.get('namespace', None)
+        self.standard_schema_id = kwargs.get('standard_schema_id', None)
+        self.value = kwargs.get('value', None)
+        self.continuation_token = kwargs.get('continuation_token', None)
+        self.next_link = kwargs.get('next_link', None)
+
+
+class MetricSchema(msrest.serialization.Model):
+    """MetricSchema.
+
+    :ivar num_properties:
+    :vartype num_properties: int
+    :ivar properties:
+    :vartype properties: list[~azure.mgmt.machinelearningservices.models.MetricSchemaProperty]
+    """
+
+    _attribute_map = {
+        'num_properties': {'key': 'numProperties', 'type': 'int'},
+        'properties': {'key': 'properties', 'type': '[MetricSchemaProperty]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword num_properties:
+        :paramtype num_properties: int
+        :keyword properties:
+        :paramtype properties: list[~azure.mgmt.machinelearningservices.models.MetricSchemaProperty]
+        """
+        super(MetricSchema, self).__init__(**kwargs)
+        self.num_properties = kwargs.get('num_properties', None)
+        self.properties = kwargs.get('properties', None)
+
+
+class MetricSchemaProperty(msrest.serialization.Model):
+    """MetricSchemaProperty.
+
+    :ivar property_id:
+    :vartype property_id: str
+    :ivar name:
+    :vartype name: str
+    :ivar type:
+    :vartype type: str
+    """
+
+    _attribute_map = {
+        'property_id': {'key': 'propertyId', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword property_id:
+        :paramtype property_id: str
+        :keyword name:
+        :paramtype name: str
+        :keyword type:
+        :paramtype type: str
+        """
+        super(MetricSchemaProperty, self).__init__(**kwargs)
+        self.property_id = kwargs.get('property_id', None)
+        self.name = kwargs.get('name', None)
+        self.type = kwargs.get('type', None)
+
+
+class MetricV2(msrest.serialization.Model):
+    """Sequence of one or many values sharing a common DataContainerId, Name, and Schema.
+
+    :ivar data_container_id: Data container to which this Metric belongs.
+    :vartype data_container_id: str
+    :ivar name: Name identifying this Metric within the Data Container.
+    :vartype name: str
+    :ivar columns: Schema shared by all values under this Metric
+     Columns.Keys define the column names which are required for each MetricValue
+     Columns.Values define the type of the associated object for each column.
+    :vartype columns: dict[str, str or ~azure.mgmt.machinelearningservices.models.MetricValueType]
+    :ivar properties:
+    :vartype properties: ~azure.mgmt.machinelearningservices.models.MetricProperties
+    :ivar namespace: Namespace for this Metric.
+    :vartype namespace: str
+    :ivar standard_schema_id:
+    :vartype standard_schema_id: str
+    :ivar value:
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.MetricV2Value]
+    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
+     additional pages.
+    :vartype continuation_token: str
+    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
+     there are no additional pages.
+    :vartype next_link: str
+    """
+
+    _attribute_map = {
+        'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'columns': {'key': 'columns', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': 'MetricProperties'},
+        'namespace': {'key': 'namespace', 'type': 'str'},
+        'standard_schema_id': {'key': 'standardSchemaId', 'type': 'str'},
+        'value': {'key': 'value', 'type': '[MetricV2Value]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword data_container_id: Data container to which this Metric belongs.
+        :paramtype data_container_id: str
+        :keyword name: Name identifying this Metric within the Data Container.
+        :paramtype name: str
+        :keyword columns: Schema shared by all values under this Metric
+         Columns.Keys define the column names which are required for each MetricValue
+         Columns.Values define the type of the associated object for each column.
+        :paramtype columns: dict[str, str or
+         ~azure.mgmt.machinelearningservices.models.MetricValueType]
+        :keyword properties:
+        :paramtype properties: ~azure.mgmt.machinelearningservices.models.MetricProperties
+        :keyword namespace: Namespace for this Metric.
+        :paramtype namespace: str
+        :keyword standard_schema_id:
+        :paramtype standard_schema_id: str
+        :keyword value:
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.MetricV2Value]
+        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
+         additional pages.
+        :paramtype continuation_token: str
+        :keyword next_link: The link to the next page constructed using the continuationToken.  If
+         null, there are no additional pages.
+        :paramtype next_link: str
+        """
+        super(MetricV2, self).__init__(**kwargs)
+        self.data_container_id = kwargs.get('data_container_id', None)
+        self.name = kwargs.get('name', None)
+        self.columns = kwargs.get('columns', None)
+        self.properties = kwargs.get('properties', None)
+        self.namespace = kwargs.get('namespace', None)
+        self.standard_schema_id = kwargs.get('standard_schema_id', None)
+        self.value = kwargs.get('value', None)
+        self.continuation_token = kwargs.get('continuation_token', None)
+        self.next_link = kwargs.get('next_link', None)
+
+
+class MetricV2Value(msrest.serialization.Model):
+    """An individual value logged within a Metric.
+
+    :ivar metric_id: Unique Id for this metric value.
+     Format is either a Guid or a Guid augmented with an additional int index for cases where
+     multiple metric values shared a MetricId in the old schema.
+    :vartype metric_id: str
+    :ivar created_utc: Client specified timestamp for this metric value.
+    :vartype created_utc: ~datetime.datetime
+    :ivar step:
+    :vartype step: long
+    :ivar data: Dictionary mapping column names (specified as the keys in MetricV2Dto.Columns) to
+     values expressed in the type associated with that column in the metric's schema.
+    :vartype data: dict[str, any]
+    """
+
+    _attribute_map = {
+        'metric_id': {'key': 'metricId', 'type': 'str'},
+        'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'},
+        'step': {'key': 'step', 'type': 'long'},
+        'data': {'key': 'data', 'type': '{object}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword metric_id: Unique Id for this metric value.
+         Format is either a Guid or a Guid augmented with an additional int index for cases where
+         multiple metric values shared a MetricId in the old schema.
+        :paramtype metric_id: str
+        :keyword created_utc: Client specified timestamp for this metric value.
+        :paramtype created_utc: ~datetime.datetime
+        :keyword step:
+        :paramtype step: long
+        :keyword data: Dictionary mapping column names (specified as the keys in MetricV2Dto.Columns)
+         to values expressed in the type associated with that column in the metric's schema.
+        :paramtype data: dict[str, any]
+        """
+        super(MetricV2Value, self).__init__(**kwargs)
+        self.metric_id = kwargs.get('metric_id', None)
+        self.created_utc = kwargs.get('created_utc', None)
+        self.step = kwargs.get('step', None)
+        self.data = kwargs.get('data', None)
+
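+# Illustrative sketch of how the columns/data schema described above fits
+# together (an inference from the docstrings, not from the service): the keys of
+# IMetricV2.columns name the columns, and each MetricV2Value.data dict is
+# expected to provide a value for those keys. The column type string "Double" is
+# a guess at a MetricValueType value, and the ids/numbers are placeholders.
+#
+#   metric = IMetricV2(
+#       data_container_id="dcid.example-run",
+#       name="accuracy",
+#       columns={"accuracy": "Double"},
+#       value=[MetricV2Value(step=1, data={"accuracy": 0.91})],
+#   )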
+
+class ModifyExperiment(msrest.serialization.Model):
+    """ModifyExperiment.
+
+    :ivar name:
+    :vartype name: str
+    :ivar description:
+    :vartype description: str
+    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
+    :vartype tags: dict[str, str]
+    :ivar archive:
+    :vartype archive: bool
+    :ivar retain_for_lifetime_of_workspace:
+    :vartype retain_for_lifetime_of_workspace: bool
+    """
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'archive': {'key': 'archive', 'type': 'bool'},
+        'retain_for_lifetime_of_workspace': {'key': 'retainForLifetimeOfWorkspace', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword name:
+        :paramtype name: str
+        :keyword description:
+        :paramtype description: str
+        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
+        :paramtype tags: dict[str, str]
+        :keyword archive:
+        :paramtype archive: bool
+        :keyword retain_for_lifetime_of_workspace:
+        :paramtype retain_for_lifetime_of_workspace: bool
+        """
+        super(ModifyExperiment, self).__init__(**kwargs)
+        self.name = kwargs.get('name', None)
+        self.description = kwargs.get('description', None)
+        self.tags = kwargs.get('tags', None)
+        self.archive = kwargs.get('archive', None)
+        self.retain_for_lifetime_of_workspace = kwargs.get('retain_for_lifetime_of_workspace', None)
+
+
+class OutputDatasetLineage(msrest.serialization.Model):
+    """OutputDatasetLineage.
+
+    :ivar identifier:
+    :vartype identifier: ~azure.mgmt.machinelearningservices.models.DatasetIdentifier
+    :ivar output_type: Possible values include: "RunOutput", "Reference".
+    :vartype output_type: str or ~azure.mgmt.machinelearningservices.models.DatasetOutputType
+    :ivar output_details:
+    :vartype output_details: ~azure.mgmt.machinelearningservices.models.DatasetOutputDetails
+    """
+
+    _attribute_map = {
+        'identifier': {'key': 'identifier', 'type': 'DatasetIdentifier'},
+        'output_type': {'key': 'outputType', 'type': 'str'},
+        'output_details': {'key': 'outputDetails', 'type': 'DatasetOutputDetails'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword identifier:
+        :paramtype identifier: ~azure.mgmt.machinelearningservices.models.DatasetIdentifier
+        :keyword output_type: Possible values include: "RunOutput", "Reference".
+        :paramtype output_type: str or ~azure.mgmt.machinelearningservices.models.DatasetOutputType
+        :keyword output_details:
+        :paramtype output_details: ~azure.mgmt.machinelearningservices.models.DatasetOutputDetails
+        """
+        super(OutputDatasetLineage, self).__init__(**kwargs)
+        self.identifier = kwargs.get('identifier', None)
+        self.output_type = kwargs.get('output_type', None)
+        self.output_details = kwargs.get('output_details', None)
+
+
+class PaginatedArtifactContentInformationList(msrest.serialization.Model):
+    """A paginated list of ArtifactContentInformations.
+
+    :ivar value: An array of objects of type ArtifactContentInformation.
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.ArtifactContentInformation]
+    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
+     additional pages.
+    :vartype continuation_token: str
+    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
+     there are no additional pages.
+    :vartype next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[ArtifactContentInformation]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword value: An array of objects of type ArtifactContentInformation.
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.ArtifactContentInformation]
+        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
+         additional pages.
+        :paramtype continuation_token: str
+        :keyword next_link: The link to the next page constructed using the continuationToken.  If
+         null, there are no additional pages.
+        :paramtype next_link: str
+        """
+        super(PaginatedArtifactContentInformationList, self).__init__(**kwargs)
+        self.value = kwargs.get('value', None)
+        self.continuation_token = kwargs.get('continuation_token', None)
+        self.next_link = kwargs.get('next_link', None)
+
+
+class PaginatedArtifactList(msrest.serialization.Model):
+    """A paginated list of Artifacts.
+
+    :ivar value: An array of objects of type Artifact.
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.Artifact]
+    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
+     additional pages.
+    :vartype continuation_token: str
+    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
+     there are no additional pages.
+    :vartype next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[Artifact]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword value: An array of objects of type Artifact.
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.Artifact]
+        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
+         additional pages.
+        :paramtype continuation_token: str
+        :keyword next_link: The link to the next page constructed using the continuationToken.  If
+         null, there are no additional pages.
+        :paramtype next_link: str
+        """
+        super(PaginatedArtifactList, self).__init__(**kwargs)
+        self.value = kwargs.get('value', None)
+        self.continuation_token = kwargs.get('continuation_token', None)
+        self.next_link = kwargs.get('next_link', None)
+
+
+class PaginatedExperimentList(msrest.serialization.Model):
+    """A paginated list of Experiments.
+
+    :ivar value: An array of objects of type Experiment.
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.Experiment]
+    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
+     additional pages.
+    :vartype continuation_token: str
+    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
+     there are no additional pages.
+    :vartype next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[Experiment]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword value: An array of objects of type Experiment.
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.Experiment]
+        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
+         additional pages.
+        :paramtype continuation_token: str
+        :keyword next_link: The link to the next page constructed using the continuationToken.  If
+         null, there are no additional pages.
+        :paramtype next_link: str
+        """
+        super(PaginatedExperimentList, self).__init__(**kwargs)
+        self.value = kwargs.get('value', None)
+        self.continuation_token = kwargs.get('continuation_token', None)
+        self.next_link = kwargs.get('next_link', None)
+
+
+class PaginatedMetricDefinitionList(msrest.serialization.Model):
+    """A paginated list of MetricDefinitions.
+
+    :ivar value: An array of objects of type MetricDefinition.
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.MetricDefinition]
+    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
+     additional pages.
+    :vartype continuation_token: str
+    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
+     there are no additional pages.
+    :vartype next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[MetricDefinition]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword value: An array of objects of type MetricDefinition.
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.MetricDefinition]
+        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
+         additional pages.
+        :paramtype continuation_token: str
+        :keyword next_link: The link to the next page constructed using the continuationToken.  If
+         null, there are no additional pages.
+        :paramtype next_link: str
+        """
+        super(PaginatedMetricDefinitionList, self).__init__(**kwargs)
+        self.value = kwargs.get('value', None)
+        self.continuation_token = kwargs.get('continuation_token', None)
+        self.next_link = kwargs.get('next_link', None)
+
+
+class PaginatedRunList(msrest.serialization.Model):
+    """A paginated list of Runs.
+
+    :ivar value: An array of objects of type Run.
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.Run]
+    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
+     additional pages.
+    :vartype continuation_token: str
+    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
+     there are no additional pages.
+    :vartype next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[Run]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword value: An array of objects of type Run.
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.Run]
+        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
+         additional pages.
+        :paramtype continuation_token: str
+        :keyword next_link: The link to the next page constructed using the continuationToken.  If
+         null, there are no additional pages.
+        :paramtype next_link: str
+        """
+        super(PaginatedRunList, self).__init__(**kwargs)
+        self.value = kwargs.get('value', None)
+        self.continuation_token = kwargs.get('continuation_token', None)
+        self.next_link = kwargs.get('next_link', None)
+
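+# Illustrative paging sketch: the Paginated* models above all expose value,
+# continuation_token, and next_link, so a caller can keep requesting pages until
+# continuation_token comes back as None. fetch_runs_page and process are
+# hypothetical helpers standing in for a call through the generated operations
+# classes.
+#
+#   token = None
+#   while True:
+#       page = fetch_runs_page(token)  # hypothetical call returning PaginatedRunList
+#       for run in page.value or []:
+#           process(run)
+#       token = page.continuation_token
+#       if not token:
+#           break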
+
+class PaginatedSpanDefinition1List(msrest.serialization.Model):
+    """A paginated list of SpanDefinition`1s.
+
+    :ivar value: An array of objects of type SpanDefinition`1.
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.SpanDefinition1]
+    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
+     additional pages.
+    :vartype continuation_token: str
+    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
+     there are no additional pages.
+    :vartype next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[SpanDefinition1]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword value: An array of objects of type SpanDefinition`1.
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.SpanDefinition1]
+        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
+         additional pages.
+        :paramtype continuation_token: str
+        :keyword next_link: The link to the next page constructed using the continuationToken.  If
+         null, there are no additional pages.
+        :paramtype next_link: str
+        """
+        super(PaginatedSpanDefinition1List, self).__init__(**kwargs)
+        self.value = kwargs.get('value', None)
+        self.continuation_token = kwargs.get('continuation_token', None)
+        self.next_link = kwargs.get('next_link', None)
+
+
+class PostRunMetricsError(msrest.serialization.Model):
+    """PostRunMetricsError.
+
+    :ivar metric: Sequence of one or many values sharing a common DataContainerId, Name, and
+     Schema. Used only for Post Metrics.
+    :vartype metric: ~azure.mgmt.machinelearningservices.models.IMetricV2
+    :ivar error_response: The error response.
+    :vartype error_response: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+    """
+
+    _attribute_map = {
+        'metric': {'key': 'metric', 'type': 'IMetricV2'},
+        'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword metric: Sequence of one or many values sharing a common DataContainerId, Name, and
+         Schema. Used only for Post Metrics.
+        :paramtype metric: ~azure.mgmt.machinelearningservices.models.IMetricV2
+        :keyword error_response: The error response.
+        :paramtype error_response: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+        """
+        super(PostRunMetricsError, self).__init__(**kwargs)
+        self.metric = kwargs.get('metric', None)
+        self.error_response = kwargs.get('error_response', None)
+
+
+class PostRunMetricsResult(msrest.serialization.Model):
+    """PostRunMetricsResult.
+
+    :ivar errors:
+    :vartype errors: list[~azure.mgmt.machinelearningservices.models.PostRunMetricsError]
+    """
+
+    _attribute_map = {
+        'errors': {'key': 'errors', 'type': '[PostRunMetricsError]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword errors:
+        :paramtype errors: list[~azure.mgmt.machinelearningservices.models.PostRunMetricsError]
+        """
+        super(PostRunMetricsResult, self).__init__(**kwargs)
+        self.errors = kwargs.get('errors', None)
+
+
+class QueryParams(msrest.serialization.Model):
+    """The set of supported filters.
+
+    :ivar filter: Allows for filtering the collection of resources.
+     The expression specified is evaluated for each resource in the collection, and only items
+     where the expression evaluates to true are included in the response.
+     See https://learn.microsoft.com/azure/search/query-odata-filter-orderby-syntax for
+     details on the expression syntax.
+    :vartype filter: str
+    :ivar continuation_token: The continuation token to use for getting the next set of resources.
+    :vartype continuation_token: str
+    :ivar order_by: The comma-separated list of resource properties to use for sorting the
+     requested resources.
+     Optionally, can be followed by either 'asc' or 'desc'.
+    :vartype order_by: str
+    :ivar top: The maximum number of items in the resource collection to be included in the result.
+     If not specified, all items are returned.
+    :vartype top: int
+    """
+
+    _attribute_map = {
+        'filter': {'key': 'filter', 'type': 'str'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'order_by': {'key': 'orderBy', 'type': 'str'},
+        'top': {'key': 'top', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword filter: Allows for filtering the collection of resources.
+         The expression specified is evaluated for each resource in the collection, and only items
+         where the expression evaluates to true are included in the response.
+         See https://learn.microsoft.com/azure/search/query-odata-filter-orderby-syntax for
+         details on the expression syntax.
+        :paramtype filter: str
+        :keyword continuation_token: The continuation token to use for getting the next set of
+         resources.
+        :paramtype continuation_token: str
+        :keyword order_by: The comma-separated list of resource properties to use for sorting the
+         requested resources.
+         Optionally, can be followed by either 'asc' or 'desc'.
+        :paramtype order_by: str
+        :keyword top: The maximum number of items in the resource collection to be included in the
+         result.
+         If not specified, all items are returned.
+        :paramtype top: int
+        """
+        super(QueryParams, self).__init__(**kwargs)
+        self.filter = kwargs.get('filter', None)
+        self.continuation_token = kwargs.get('continuation_token', None)
+        self.order_by = kwargs.get('order_by', None)
+        self.top = kwargs.get('top', None)
+
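+# Illustrative sketch: QueryParams mirrors ExperimentQueryParams minus the
+# view_type field. The filter expression below is a made-up example of the
+# OData-style syntax referenced in the docstring.
+#
+#   query = QueryParams(
+#       filter="status eq 'Completed'",
+#       order_by="createdUtc desc",
+#       top=100,
+#   )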
+
+class QueueingInfo(msrest.serialization.Model):
+    """QueueingInfo.
+
+    :ivar code:
+    :vartype code: str
+    :ivar message:
+    :vartype message: str
+    :ivar last_refresh_timestamp:
+    :vartype last_refresh_timestamp: ~datetime.datetime
+    """
+
+    _attribute_map = {
+        'code': {'key': 'code', 'type': 'str'},
+        'message': {'key': 'message', 'type': 'str'},
+        'last_refresh_timestamp': {'key': 'lastRefreshTimestamp', 'type': 'iso-8601'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword code:
+        :paramtype code: str
+        :keyword message:
+        :paramtype message: str
+        :keyword last_refresh_timestamp:
+        :paramtype last_refresh_timestamp: ~datetime.datetime
+        """
+        super(QueueingInfo, self).__init__(**kwargs)
+        self.code = kwargs.get('code', None)
+        self.message = kwargs.get('message', None)
+        self.last_refresh_timestamp = kwargs.get('last_refresh_timestamp', None)
+
+
+class RetrieveFullFidelityMetricRequest(msrest.serialization.Model):
+    """RetrieveFullFidelityMetricRequest.
+
+    :ivar metric_name:
+    :vartype metric_name: str
+    :ivar continuation_token:
+    :vartype continuation_token: str
+    :ivar start_time:
+    :vartype start_time: ~datetime.datetime
+    :ivar end_time:
+    :vartype end_time: ~datetime.datetime
+    :ivar metric_namespace:
+    :vartype metric_namespace: str
+    """
+
+    _attribute_map = {
+        'metric_name': {'key': 'metricName', 'type': 'str'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
+        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
+        'metric_namespace': {'key': 'metricNamespace', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword metric_name:
+        :paramtype metric_name: str
+        :keyword continuation_token:
+        :paramtype continuation_token: str
+        :keyword start_time:
+        :paramtype start_time: ~datetime.datetime
+        :keyword end_time:
+        :paramtype end_time: ~datetime.datetime
+        :keyword metric_namespace:
+        :paramtype metric_namespace: str
+        """
+        super(RetrieveFullFidelityMetricRequest, self).__init__(**kwargs)
+        self.metric_name = kwargs.get('metric_name', None)
+        self.continuation_token = kwargs.get('continuation_token', None)
+        self.start_time = kwargs.get('start_time', None)
+        self.end_time = kwargs.get('end_time', None)
+        self.metric_namespace = kwargs.get('metric_namespace', None)
+
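+# Illustrative sketch (not part of the generated client): a request body asking for
+# full-fidelity values of one metric over a time window. The metric name and times are
+# assumed example values; start_time/end_time are serialized as ISO-8601 per _attribute_map.
+#
+#     import datetime
+#     request = RetrieveFullFidelityMetricRequest(
+#         metric_name="accuracy",
+#         start_time=datetime.datetime(2024, 1, 1, tzinfo=datetime.timezone.utc),
+#         end_time=datetime.datetime(2024, 1, 2, tzinfo=datetime.timezone.utc),
+#     )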
+
+class RootError(msrest.serialization.Model):
+    """The root error.
+
+    :ivar code: The service-defined error code. Supported error codes: ServiceError, UserError,
+     ValidationError, AzureStorageError, TransientError, RequestThrottled.
+    :vartype code: str
+    :ivar severity: The severity of the error.
+    :vartype severity: int
+    :ivar message: A human-readable representation of the error.
+    :vartype message: str
+    :ivar message_format: An unformatted version of the message with no variable substitution.
+    :vartype message_format: str
+    :ivar message_parameters: Value substitutions corresponding to the contents of MessageFormat.
+    :vartype message_parameters: dict[str, str]
+    :ivar reference_code: This code can optionally be set by the system generating the error.
+     It should be used to classify the problem and identify the module and code area where the
+     failure occurred.
+    :vartype reference_code: str
+    :ivar details_uri: A URI which points to more details about the context of the error.
+    :vartype details_uri: str
+    :ivar target: The target of the error (e.g., the name of the property in error).
+    :vartype target: str
+    :ivar details: The related errors that occurred during the request.
+    :vartype details: list[~azure.mgmt.machinelearningservices.models.RootError]
+    :ivar inner_error: A nested structure of errors.
+    :vartype inner_error: ~azure.mgmt.machinelearningservices.models.InnerErrorResponse
+    :ivar additional_info: The error additional info.
+    :vartype additional_info: list[~azure.mgmt.machinelearningservices.models.ErrorAdditionalInfo]
+    """
+
+    _attribute_map = {
+        'code': {'key': 'code', 'type': 'str'},
+        'severity': {'key': 'severity', 'type': 'int'},
+        'message': {'key': 'message', 'type': 'str'},
+        'message_format': {'key': 'messageFormat', 'type': 'str'},
+        'message_parameters': {'key': 'messageParameters', 'type': '{str}'},
+        'reference_code': {'key': 'referenceCode', 'type': 'str'},
+        'details_uri': {'key': 'detailsUri', 'type': 'str'},
+        'target': {'key': 'target', 'type': 'str'},
+        'details': {'key': 'details', 'type': '[RootError]'},
+        'inner_error': {'key': 'innerError', 'type': 'InnerErrorResponse'},
+        'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword code: The service-defined error code. Supported error codes: ServiceError, UserError,
+         ValidationError, AzureStorageError, TransientError, RequestThrottled.
+        :paramtype code: str
+        :keyword severity: The severity of the error.
+        :paramtype severity: int
+        :keyword message: A human-readable representation of the error.
+        :paramtype message: str
+        :keyword message_format: An unformatted version of the message with no variable substitution.
+        :paramtype message_format: str
+        :keyword message_parameters: Value substitutions corresponding to the contents of
+         MessageFormat.
+        :paramtype message_parameters: dict[str, str]
+        :keyword reference_code: This code can optionally be set by the system generating the error.
+         It should be used to classify the problem and identify the module and code area where the
+         failure occurred.
+        :paramtype reference_code: str
+        :keyword details_uri: A URI which points to more details about the context of the error.
+        :paramtype details_uri: str
+        :keyword target: The target of the error (e.g., the name of the property in error).
+        :paramtype target: str
+        :keyword details: The related errors that occurred during the request.
+        :paramtype details: list[~azure.mgmt.machinelearningservices.models.RootError]
+        :keyword inner_error: A nested structure of errors.
+        :paramtype inner_error: ~azure.mgmt.machinelearningservices.models.InnerErrorResponse
+        :keyword additional_info: The error additional info.
+        :paramtype additional_info:
+         list[~azure.mgmt.machinelearningservices.models.ErrorAdditionalInfo]
+        """
+        super(RootError, self).__init__(**kwargs)
+        self.code = kwargs.get('code', None)
+        self.severity = kwargs.get('severity', None)
+        self.message = kwargs.get('message', None)
+        self.message_format = kwargs.get('message_format', None)
+        self.message_parameters = kwargs.get('message_parameters', None)
+        self.reference_code = kwargs.get('reference_code', None)
+        self.details_uri = kwargs.get('details_uri', None)
+        self.target = kwargs.get('target', None)
+        self.details = kwargs.get('details', None)
+        self.inner_error = kwargs.get('inner_error', None)
+        self.additional_info = kwargs.get('additional_info', None)
+
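+# Illustrative sketch (not part of the generated client): RootError nests further
+# RootError instances under `details` and an InnerErrorResponse under `inner_error`.
+# The codes and messages below are assumed example values.
+#
+#     err = RootError(
+#         code="UserError",
+#         message="Run submission failed.",
+#         details=[RootError(code="ValidationError", message="runId is too long.")],
+#     )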
+
+class Run(msrest.serialization.Model):
+    """The definition of a Run.
+
+    :ivar run_number:
+    :vartype run_number: int
+    :ivar root_run_id:
+    :vartype root_run_id: str
+    :ivar created_utc: The time the run was created in UTC.
+    :vartype created_utc: ~datetime.datetime
+    :ivar created_by:
+    :vartype created_by: ~azure.mgmt.machinelearningservices.models.User
+    :ivar user_id: The Id of the user that created the run.
+    :vartype user_id: str
+    :ivar token: A token used for authenticating a run.
+    :vartype token: str
+    :ivar token_expiry_time_utc: The Token expiration time in UTC.
+    :vartype token_expiry_time_utc: ~datetime.datetime
+    :ivar error: The error response.
+    :vartype error: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+    :ivar warnings: A list of warnings that occurred during the run.
+    :vartype warnings: list[~azure.mgmt.machinelearningservices.models.RunDetailsWarning]
+    :ivar revision:
+    :vartype revision: long
+    :ivar status_revision:
+    :vartype status_revision: long
+    :ivar run_uuid: A system generated Id for the run.
+    :vartype run_uuid: str
+    :ivar parent_run_uuid: A system generated Id for the run's parent.
+    :vartype parent_run_uuid: str
+    :ivar root_run_uuid: A system generated Id for the root of the run's hierarchy.
+    :vartype root_run_uuid: str
+    :ivar has_virtual_parent: Indicates if this is a child of a virtual run.
+    :vartype has_virtual_parent: bool
+    :ivar last_start_time_utc: The last timestamp when a run transitioned from paused to running.
+     Initialized when StartTimeUtc is first set.
+    :vartype last_start_time_utc: ~datetime.datetime
+    :ivar current_compute_time: The cumulative time spent in an active status for an active run.
+    :vartype current_compute_time: str
+    :ivar compute_duration: The cumulative time spent in an active status for a terminal run.
+    :vartype compute_duration: str
+    :ivar effective_start_time_utc: A relative start time set as LastStartTimeUtc - ComputeTime for
+     active runs. This allows sorting active runs on how long they have been active, since an actual
+     active duration cannot be frequently updated.
+    :vartype effective_start_time_utc: ~datetime.datetime
+    :ivar last_modified_by:
+    :vartype last_modified_by: ~azure.mgmt.machinelearningservices.models.User
+    :ivar last_modified_utc: The last modified time of the run in UTC.
+    :vartype last_modified_utc: ~datetime.datetime
+    :ivar duration: The total duration of a run.
+    :vartype duration: str
+    :ivar cancelation_reason: The cancelation reason if the run was canceled.
+    :vartype cancelation_reason: str
+    :ivar run_id: The identifier for the run. Run IDs must be less than 256 characters and contain
+     only alphanumeric characters with dashes and underscores.
+    :vartype run_id: str
+    :ivar parent_run_id: The parent of the run if the run is hierarchical; otherwise, Null.
+    :vartype parent_run_id: str
+    :ivar experiment_id: The Id of the experiment that created this run.
+    :vartype experiment_id: str
+    :ivar status: The status of the run. The Status string value maps to the RunStatus Enum.
+    :vartype status: str
+    :ivar start_time_utc: The start time of the run in UTC.
+    :vartype start_time_utc: ~datetime.datetime
+    :ivar end_time_utc: The end time of the run in UTC.
+    :vartype end_time_utc: ~datetime.datetime
+    :ivar options:
+    :vartype options: ~azure.mgmt.machinelearningservices.models.RunOptions
+    :ivar is_virtual: A virtual run can set an active child run that will override the virtual run
+     status and properties.
+    :vartype is_virtual: bool
+    :ivar display_name:
+    :vartype display_name: str
+    :ivar name:
+    :vartype name: str
+    :ivar data_container_id:
+    :vartype data_container_id: str
+    :ivar description:
+    :vartype description: str
+    :ivar hidden:
+    :vartype hidden: bool
+    :ivar run_type:
+    :vartype run_type: str
+    :ivar run_type_v2:
+    :vartype run_type_v2: ~azure.mgmt.machinelearningservices.models.RunTypeV2
+    :ivar properties: Dictionary of :code:`<string>`.
+    :vartype properties: dict[str, str]
+    :ivar parameters: Dictionary of :code:`<any>`.
+    :vartype parameters: dict[str, any]
+    :ivar action_uris: Dictionary of :code:`<string>`.
+    :vartype action_uris: dict[str, str]
+    :ivar script_name:
+    :vartype script_name: str
+    :ivar target:
+    :vartype target: str
+    :ivar unique_child_run_compute_targets:
+    :vartype unique_child_run_compute_targets: list[str]
+    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
+    :vartype tags: dict[str, str]
+    :ivar settings: Dictionary of :code:`<string>`.
+    :vartype settings: dict[str, str]
+    :ivar services: Dictionary of :code:`<EndpointSetting>`.
+    :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.EndpointSetting]
+    :ivar input_datasets:
+    :vartype input_datasets: list[~azure.mgmt.machinelearningservices.models.DatasetLineage]
+    :ivar output_datasets:
+    :vartype output_datasets: list[~azure.mgmt.machinelearningservices.models.OutputDatasetLineage]
+    :ivar run_definition: Anything.
+    :vartype run_definition: any
+    :ivar job_specification: Anything.
+    :vartype job_specification: any
+    :ivar primary_metric_name:
+    :vartype primary_metric_name: str
+    :ivar created_from:
+    :vartype created_from: ~azure.mgmt.machinelearningservices.models.CreatedFrom
+    :ivar cancel_uri:
+    :vartype cancel_uri: str
+    :ivar complete_uri:
+    :vartype complete_uri: str
+    :ivar diagnostics_uri:
+    :vartype diagnostics_uri: str
+    :ivar compute_request:
+    :vartype compute_request: ~azure.mgmt.machinelearningservices.models.ComputeRequest
+    :ivar compute:
+    :vartype compute: ~azure.mgmt.machinelearningservices.models.Compute
+    :ivar retain_for_lifetime_of_workspace:
+    :vartype retain_for_lifetime_of_workspace: bool
+    :ivar queueing_info:
+    :vartype queueing_info: ~azure.mgmt.machinelearningservices.models.QueueingInfo
+    :ivar active_child_run_id: The RunId of the active child on a virtual run.
+    :vartype active_child_run_id: str
+    :ivar inputs: Dictionary of :code:`<TypedAssetReference>`.
+    :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+    :ivar outputs: Dictionary of :code:`<TypedAssetReference>`.
+    :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+    """
+
+    _validation = {
+        'unique_child_run_compute_targets': {'unique': True},
+        'input_datasets': {'unique': True},
+        'output_datasets': {'unique': True},
+    }
+
+    _attribute_map = {
+        'run_number': {'key': 'runNumber', 'type': 'int'},
+        'root_run_id': {'key': 'rootRunId', 'type': 'str'},
+        'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'},
+        'created_by': {'key': 'createdBy', 'type': 'User'},
+        'user_id': {'key': 'userId', 'type': 'str'},
+        'token': {'key': 'token', 'type': 'str'},
+        'token_expiry_time_utc': {'key': 'tokenExpiryTimeUtc', 'type': 'iso-8601'},
+        'error': {'key': 'error', 'type': 'ErrorResponse'},
+        'warnings': {'key': 'warnings', 'type': '[RunDetailsWarning]'},
+        'revision': {'key': 'revision', 'type': 'long'},
+        'status_revision': {'key': 'statusRevision', 'type': 'long'},
+        'run_uuid': {'key': 'runUuid', 'type': 'str'},
+        'parent_run_uuid': {'key': 'parentRunUuid', 'type': 'str'},
+        'root_run_uuid': {'key': 'rootRunUuid', 'type': 'str'},
+        'has_virtual_parent': {'key': 'hasVirtualParent', 'type': 'bool'},
+        'last_start_time_utc': {'key': 'lastStartTimeUtc', 'type': 'iso-8601'},
+        'current_compute_time': {'key': 'currentComputeTime', 'type': 'str'},
+        'compute_duration': {'key': 'computeDuration', 'type': 'str'},
+        'effective_start_time_utc': {'key': 'effectiveStartTimeUtc', 'type': 'iso-8601'},
+        'last_modified_by': {'key': 'lastModifiedBy', 'type': 'User'},
+        'last_modified_utc': {'key': 'lastModifiedUtc', 'type': 'iso-8601'},
+        'duration': {'key': 'duration', 'type': 'str'},
+        'cancelation_reason': {'key': 'cancelationReason', 'type': 'str'},
+        'run_id': {'key': 'runId', 'type': 'str'},
+        'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
+        'experiment_id': {'key': 'experimentId', 'type': 'str'},
+        'status': {'key': 'status', 'type': 'str'},
+        'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
+        'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
+        'options': {'key': 'options', 'type': 'RunOptions'},
+        'is_virtual': {'key': 'isVirtual', 'type': 'bool'},
+        'display_name': {'key': 'displayName', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'hidden': {'key': 'hidden', 'type': 'bool'},
+        'run_type': {'key': 'runType', 'type': 'str'},
+        'run_type_v2': {'key': 'runTypeV2', 'type': 'RunTypeV2'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'parameters': {'key': 'parameters', 'type': '{object}'},
+        'action_uris': {'key': 'actionUris', 'type': '{str}'},
+        'script_name': {'key': 'scriptName', 'type': 'str'},
+        'target': {'key': 'target', 'type': 'str'},
+        'unique_child_run_compute_targets': {'key': 'uniqueChildRunComputeTargets', 'type': '[str]'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'settings': {'key': 'settings', 'type': '{str}'},
+        'services': {'key': 'services', 'type': '{EndpointSetting}'},
+        'input_datasets': {'key': 'inputDatasets', 'type': '[DatasetLineage]'},
+        'output_datasets': {'key': 'outputDatasets', 'type': '[OutputDatasetLineage]'},
+        'run_definition': {'key': 'runDefinition', 'type': 'object'},
+        'job_specification': {'key': 'jobSpecification', 'type': 'object'},
+        'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
+        'created_from': {'key': 'createdFrom', 'type': 'CreatedFrom'},
+        'cancel_uri': {'key': 'cancelUri', 'type': 'str'},
+        'complete_uri': {'key': 'completeUri', 'type': 'str'},
+        'diagnostics_uri': {'key': 'diagnosticsUri', 'type': 'str'},
+        'compute_request': {'key': 'computeRequest', 'type': 'ComputeRequest'},
+        'compute': {'key': 'compute', 'type': 'Compute'},
+        'retain_for_lifetime_of_workspace': {'key': 'retainForLifetimeOfWorkspace', 'type': 'bool'},
+        'queueing_info': {'key': 'queueingInfo', 'type': 'QueueingInfo'},
+        'active_child_run_id': {'key': 'activeChildRunId', 'type': 'str'},
+        'inputs': {'key': 'inputs', 'type': '{TypedAssetReference}'},
+        'outputs': {'key': 'outputs', 'type': '{TypedAssetReference}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword run_number:
+        :paramtype run_number: int
+        :keyword root_run_id:
+        :paramtype root_run_id: str
+        :keyword created_utc: The time the run was created in UTC.
+        :paramtype created_utc: ~datetime.datetime
+        :keyword created_by:
+        :paramtype created_by: ~azure.mgmt.machinelearningservices.models.User
+        :keyword user_id: The Id of the user that created the run.
+        :paramtype user_id: str
+        :keyword token: A token used for authenticating a run.
+        :paramtype token: str
+        :keyword token_expiry_time_utc: The Token expiration time in UTC.
+        :paramtype token_expiry_time_utc: ~datetime.datetime
+        :keyword error: The error response.
+        :paramtype error: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+        :keyword warnings: A list of warnings that occurred during the run.
+        :paramtype warnings: list[~azure.mgmt.machinelearningservices.models.RunDetailsWarning]
+        :keyword revision:
+        :paramtype revision: long
+        :keyword status_revision:
+        :paramtype status_revision: long
+        :keyword run_uuid: A system generated Id for the run.
+        :paramtype run_uuid: str
+        :keyword parent_run_uuid: A system generated Id for the run's parent.
+        :paramtype parent_run_uuid: str
+        :keyword root_run_uuid: A system generated Id for the root of the run's hierarchy.
+        :paramtype root_run_uuid: str
+        :keyword has_virtual_parent: Indicates if this is a child of a virtual run.
+        :paramtype has_virtual_parent: bool
+        :keyword last_start_time_utc: The last timestamp when a run transitioned from paused to
+         running. Initialized when StartTimeUtc is first set.
+        :paramtype last_start_time_utc: ~datetime.datetime
+        :keyword current_compute_time: The cumulative time spent in an active status for an active run.
+        :paramtype current_compute_time: str
+        :keyword compute_duration: The cumulative time spent in an active status for a terminal run.
+        :paramtype compute_duration: str
+        :keyword effective_start_time_utc: A relative start time set as LastStartTimeUtc - ComputeTime
+         for active runs. This allows sorting active runs on how long they have been active, since an
+         actual active duration cannot be frequently updated.
+        :paramtype effective_start_time_utc: ~datetime.datetime
+        :keyword last_modified_by:
+        :paramtype last_modified_by: ~azure.mgmt.machinelearningservices.models.User
+        :keyword last_modified_utc: The last modified time of the run in UTC.
+        :paramtype last_modified_utc: ~datetime.datetime
+        :keyword duration: The total duration of a run.
+        :paramtype duration: str
+        :keyword cancelation_reason: The cancelation reason if the run was canceled.
+        :paramtype cancelation_reason: str
+        :keyword run_id: The identifier for the run. Run IDs must be less than 256 characters and
+         contain only alphanumeric characters with dashes and underscores.
+        :paramtype run_id: str
+        :keyword parent_run_id: The parent of the run if the run is hierarchical; otherwise, Null.
+        :paramtype parent_run_id: str
+        :keyword experiment_id: The Id of the experiment that created this run.
+        :paramtype experiment_id: str
+        :keyword status: The status of the run. The Status string value maps to the RunStatus Enum.
+        :paramtype status: str
+        :keyword start_time_utc: The start time of the run in UTC.
+        :paramtype start_time_utc: ~datetime.datetime
+        :keyword end_time_utc: The end time of the run in UTC.
+        :paramtype end_time_utc: ~datetime.datetime
+        :keyword options:
+        :paramtype options: ~azure.mgmt.machinelearningservices.models.RunOptions
+        :keyword is_virtual: A virtual run can set an active child run that will override the virtual
+         run status and properties.
+        :paramtype is_virtual: bool
+        :keyword display_name:
+        :paramtype display_name: str
+        :keyword name:
+        :paramtype name: str
+        :keyword data_container_id:
+        :paramtype data_container_id: str
+        :keyword description:
+        :paramtype description: str
+        :keyword hidden:
+        :paramtype hidden: bool
+        :keyword run_type:
+        :paramtype run_type: str
+        :keyword run_type_v2:
+        :paramtype run_type_v2: ~azure.mgmt.machinelearningservices.models.RunTypeV2
+        :keyword properties: Dictionary of :code:`<string>`.
+        :paramtype properties: dict[str, str]
+        :keyword parameters: Dictionary of :code:`<any>`.
+        :paramtype parameters: dict[str, any]
+        :keyword action_uris: Dictionary of :code:`<string>`.
+        :paramtype action_uris: dict[str, str]
+        :keyword script_name:
+        :paramtype script_name: str
+        :keyword target:
+        :paramtype target: str
+        :keyword unique_child_run_compute_targets:
+        :paramtype unique_child_run_compute_targets: list[str]
+        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
+        :paramtype tags: dict[str, str]
+        :keyword settings: Dictionary of :code:`<string>`.
+        :paramtype settings: dict[str, str]
+        :keyword services: Dictionary of :code:`<EndpointSetting>`.
+        :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.EndpointSetting]
+        :keyword input_datasets:
+        :paramtype input_datasets: list[~azure.mgmt.machinelearningservices.models.DatasetLineage]
+        :keyword output_datasets:
+        :paramtype output_datasets:
+         list[~azure.mgmt.machinelearningservices.models.OutputDatasetLineage]
+        :keyword run_definition: Anything.
+        :paramtype run_definition: any
+        :keyword job_specification: Anything.
+        :paramtype job_specification: any
+        :keyword primary_metric_name:
+        :paramtype primary_metric_name: str
+        :keyword created_from:
+        :paramtype created_from: ~azure.mgmt.machinelearningservices.models.CreatedFrom
+        :keyword cancel_uri:
+        :paramtype cancel_uri: str
+        :keyword complete_uri:
+        :paramtype complete_uri: str
+        :keyword diagnostics_uri:
+        :paramtype diagnostics_uri: str
+        :keyword compute_request:
+        :paramtype compute_request: ~azure.mgmt.machinelearningservices.models.ComputeRequest
+        :keyword compute:
+        :paramtype compute: ~azure.mgmt.machinelearningservices.models.Compute
+        :keyword retain_for_lifetime_of_workspace:
+        :paramtype retain_for_lifetime_of_workspace: bool
+        :keyword queueing_info:
+        :paramtype queueing_info: ~azure.mgmt.machinelearningservices.models.QueueingInfo
+        :keyword active_child_run_id: The RunId of the active child on a virtual run.
+        :paramtype active_child_run_id: str
+        :keyword inputs: Dictionary of :code:`<TypedAssetReference>`.
+        :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+        :keyword outputs: Dictionary of :code:`<TypedAssetReference>`.
+        :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+        """
+        super(Run, self).__init__(**kwargs)
+        self.run_number = kwargs.get('run_number', None)
+        self.root_run_id = kwargs.get('root_run_id', None)
+        self.created_utc = kwargs.get('created_utc', None)
+        self.created_by = kwargs.get('created_by', None)
+        self.user_id = kwargs.get('user_id', None)
+        self.token = kwargs.get('token', None)
+        self.token_expiry_time_utc = kwargs.get('token_expiry_time_utc', None)
+        self.error = kwargs.get('error', None)
+        self.warnings = kwargs.get('warnings', None)
+        self.revision = kwargs.get('revision', None)
+        self.status_revision = kwargs.get('status_revision', None)
+        self.run_uuid = kwargs.get('run_uuid', None)
+        self.parent_run_uuid = kwargs.get('parent_run_uuid', None)
+        self.root_run_uuid = kwargs.get('root_run_uuid', None)
+        self.has_virtual_parent = kwargs.get('has_virtual_parent', None)
+        self.last_start_time_utc = kwargs.get('last_start_time_utc', None)
+        self.current_compute_time = kwargs.get('current_compute_time', None)
+        self.compute_duration = kwargs.get('compute_duration', None)
+        self.effective_start_time_utc = kwargs.get('effective_start_time_utc', None)
+        self.last_modified_by = kwargs.get('last_modified_by', None)
+        self.last_modified_utc = kwargs.get('last_modified_utc', None)
+        self.duration = kwargs.get('duration', None)
+        self.cancelation_reason = kwargs.get('cancelation_reason', None)
+        self.run_id = kwargs.get('run_id', None)
+        self.parent_run_id = kwargs.get('parent_run_id', None)
+        self.experiment_id = kwargs.get('experiment_id', None)
+        self.status = kwargs.get('status', None)
+        self.start_time_utc = kwargs.get('start_time_utc', None)
+        self.end_time_utc = kwargs.get('end_time_utc', None)
+        self.options = kwargs.get('options', None)
+        self.is_virtual = kwargs.get('is_virtual', None)
+        self.display_name = kwargs.get('display_name', None)
+        self.name = kwargs.get('name', None)
+        self.data_container_id = kwargs.get('data_container_id', None)
+        self.description = kwargs.get('description', None)
+        self.hidden = kwargs.get('hidden', None)
+        self.run_type = kwargs.get('run_type', None)
+        self.run_type_v2 = kwargs.get('run_type_v2', None)
+        self.properties = kwargs.get('properties', None)
+        self.parameters = kwargs.get('parameters', None)
+        self.action_uris = kwargs.get('action_uris', None)
+        self.script_name = kwargs.get('script_name', None)
+        self.target = kwargs.get('target', None)
+        self.unique_child_run_compute_targets = kwargs.get('unique_child_run_compute_targets', None)
+        self.tags = kwargs.get('tags', None)
+        self.settings = kwargs.get('settings', None)
+        self.services = kwargs.get('services', None)
+        self.input_datasets = kwargs.get('input_datasets', None)
+        self.output_datasets = kwargs.get('output_datasets', None)
+        self.run_definition = kwargs.get('run_definition', None)
+        self.job_specification = kwargs.get('job_specification', None)
+        self.primary_metric_name = kwargs.get('primary_metric_name', None)
+        self.created_from = kwargs.get('created_from', None)
+        self.cancel_uri = kwargs.get('cancel_uri', None)
+        self.complete_uri = kwargs.get('complete_uri', None)
+        self.diagnostics_uri = kwargs.get('diagnostics_uri', None)
+        self.compute_request = kwargs.get('compute_request', None)
+        self.compute = kwargs.get('compute', None)
+        self.retain_for_lifetime_of_workspace = kwargs.get('retain_for_lifetime_of_workspace', None)
+        self.queueing_info = kwargs.get('queueing_info', None)
+        self.active_child_run_id = kwargs.get('active_child_run_id', None)
+        self.inputs = kwargs.get('inputs', None)
+        self.outputs = kwargs.get('outputs', None)
+
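+# Illustrative sketch (not part of the generated client): Run is a plain data model,
+# so a minimal instance only needs the fields a caller cares about. The ids, tags, and
+# properties below are assumed example values; keys are remapped to camelCase on
+# serialization (run_id -> runId, parent_run_id -> parentRunId, ...).
+#
+#     run = Run(
+#         run_id="my-experiment_1700000000_abcd1234",
+#         experiment_id="00000000-0000-0000-0000-000000000000",
+#         status="Running",
+#         tags={"team": "forecasting"},
+#         properties={"source": "sdk"},
+#     )
+#     payload = run.serialize()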
+
+class RunDetails(msrest.serialization.Model):
+    """The details of the run.
+
+    :ivar run_id: The identifier for the run.
+    :vartype run_id: str
+    :ivar run_uuid: A system generated Id for the run.
+    :vartype run_uuid: str
+    :ivar parent_run_uuid: A system generated Id for the run's parent.
+    :vartype parent_run_uuid: str
+    :ivar root_run_uuid: A system generated Id for the root of the run's hierarchy.
+    :vartype root_run_uuid: str
+    :ivar target: The name of the compute target where the run is executed.
+    :vartype target: str
+    :ivar status: The status of the run. The Status string value maps to the RunStatus Enum.
+    :vartype status: str
+    :ivar parent_run_id: The parent of the run if the run is hierarchical.
+    :vartype parent_run_id: str
+    :ivar created_time_utc: The creation time of the run in UTC.
+    :vartype created_time_utc: ~datetime.datetime
+    :ivar start_time_utc: The start time of the run in UTC.
+    :vartype start_time_utc: ~datetime.datetime
+    :ivar end_time_utc: The end time of the run in UTC.
+    :vartype end_time_utc: ~datetime.datetime
+    :ivar error: The error response.
+    :vartype error: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+    :ivar warnings: A list of warnings that occurred during the run.
+    :vartype warnings: list[~azure.mgmt.machinelearningservices.models.RunDetailsWarning]
+    :ivar tags: A set of tags. The tag dictionary for the run. Tags are mutable.
+    :vartype tags: dict[str, str]
+    :ivar properties: The properties dictionary for the run. Properties are immutable.
+    :vartype properties: dict[str, str]
+    :ivar parameters: The parameters dictionary for the run. Parameters are immutable.
+    :vartype parameters: dict[str, any]
+    :ivar services: The interactive run services for a run. Services are mutable.
+    :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.EndpointSetting]
+    :ivar input_datasets: A list of datasets used as input to the run.
+    :vartype input_datasets: list[~azure.mgmt.machinelearningservices.models.DatasetLineage]
+    :ivar output_datasets: A list of datasets used as output of the run.
+    :vartype output_datasets: list[~azure.mgmt.machinelearningservices.models.OutputDatasetLineage]
+    :ivar run_definition: The run definition specification.
+    :vartype run_definition: any
+    :ivar log_files: Dictionary of :code:`<string>`.
+    :vartype log_files: dict[str, str]
+    :ivar job_cost:
+    :vartype job_cost: ~azure.mgmt.machinelearningservices.models.JobCost
+    :ivar revision:
+    :vartype revision: long
+    :ivar run_type_v2:
+    :vartype run_type_v2: ~azure.mgmt.machinelearningservices.models.RunTypeV2
+    :ivar settings: The run settings.
+    :vartype settings: dict[str, str]
+    :ivar compute_request:
+    :vartype compute_request: ~azure.mgmt.machinelearningservices.models.ComputeRequest
+    :ivar compute:
+    :vartype compute: ~azure.mgmt.machinelearningservices.models.Compute
+    :ivar created_by:
+    :vartype created_by: ~azure.mgmt.machinelearningservices.models.User
+    :ivar compute_duration: Time spent in an active state for terminal runs.
+    :vartype compute_duration: str
+    :ivar effective_start_time_utc: Relative start time of active runs for ordering and computing
+     active compute duration.
+     Compute duration of an active run is now() - EffectiveStartTimeUtc.
+    :vartype effective_start_time_utc: ~datetime.datetime
+    :ivar run_number:
+    :vartype run_number: int
+    :ivar root_run_id:
+    :vartype root_run_id: str
+    :ivar user_id: The Id of the user that created the run.
+    :vartype user_id: str
+    :ivar status_revision:
+    :vartype status_revision: long
+    :ivar has_virtual_parent: Indicates if this is a child of a virtual run.
+    :vartype has_virtual_parent: bool
+    :ivar current_compute_time: The cumulative time spent in an active status for an active run.
+    :vartype current_compute_time: str
+    :ivar last_start_time_utc: The last timestamp when a run transitioned from paused to running.
+     Initialized when StartTimeUtc is first set.
+    :vartype last_start_time_utc: ~datetime.datetime
+    :ivar last_modified_by:
+    :vartype last_modified_by: ~azure.mgmt.machinelearningservices.models.User
+    :ivar last_modified_utc: The last modified time of the run in UTC.
+    :vartype last_modified_utc: ~datetime.datetime
+    :ivar duration: The total duration of a run.
+    :vartype duration: str
+    :ivar inputs: The inputs for the run.
+    :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+    :ivar outputs: The outputs for the run.
+    :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+    """
+
+    _validation = {
+        'input_datasets': {'unique': True},
+        'output_datasets': {'unique': True},
+    }
+
+    _attribute_map = {
+        'run_id': {'key': 'runId', 'type': 'str'},
+        'run_uuid': {'key': 'runUuid', 'type': 'str'},
+        'parent_run_uuid': {'key': 'parentRunUuid', 'type': 'str'},
+        'root_run_uuid': {'key': 'rootRunUuid', 'type': 'str'},
+        'target': {'key': 'target', 'type': 'str'},
+        'status': {'key': 'status', 'type': 'str'},
+        'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
+        'created_time_utc': {'key': 'createdTimeUtc', 'type': 'iso-8601'},
+        'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
+        'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
+        'error': {'key': 'error', 'type': 'ErrorResponse'},
+        'warnings': {'key': 'warnings', 'type': '[RunDetailsWarning]'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'parameters': {'key': 'parameters', 'type': '{object}'},
+        'services': {'key': 'services', 'type': '{EndpointSetting}'},
+        'input_datasets': {'key': 'inputDatasets', 'type': '[DatasetLineage]'},
+        'output_datasets': {'key': 'outputDatasets', 'type': '[OutputDatasetLineage]'},
+        'run_definition': {'key': 'runDefinition', 'type': 'object'},
+        'log_files': {'key': 'logFiles', 'type': '{str}'},
+        'job_cost': {'key': 'jobCost', 'type': 'JobCost'},
+        'revision': {'key': 'revision', 'type': 'long'},
+        'run_type_v2': {'key': 'runTypeV2', 'type': 'RunTypeV2'},
+        'settings': {'key': 'settings', 'type': '{str}'},
+        'compute_request': {'key': 'computeRequest', 'type': 'ComputeRequest'},
+        'compute': {'key': 'compute', 'type': 'Compute'},
+        'created_by': {'key': 'createdBy', 'type': 'User'},
+        'compute_duration': {'key': 'computeDuration', 'type': 'str'},
+        'effective_start_time_utc': {'key': 'effectiveStartTimeUtc', 'type': 'iso-8601'},
+        'run_number': {'key': 'runNumber', 'type': 'int'},
+        'root_run_id': {'key': 'rootRunId', 'type': 'str'},
+        'user_id': {'key': 'userId', 'type': 'str'},
+        'status_revision': {'key': 'statusRevision', 'type': 'long'},
+        'has_virtual_parent': {'key': 'hasVirtualParent', 'type': 'bool'},
+        'current_compute_time': {'key': 'currentComputeTime', 'type': 'str'},
+        'last_start_time_utc': {'key': 'lastStartTimeUtc', 'type': 'iso-8601'},
+        'last_modified_by': {'key': 'lastModifiedBy', 'type': 'User'},
+        'last_modified_utc': {'key': 'lastModifiedUtc', 'type': 'iso-8601'},
+        'duration': {'key': 'duration', 'type': 'str'},
+        'inputs': {'key': 'inputs', 'type': '{TypedAssetReference}'},
+        'outputs': {'key': 'outputs', 'type': '{TypedAssetReference}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword run_id: The identifier for the run.
+        :paramtype run_id: str
+        :keyword run_uuid: A system generated Id for the run.
+        :paramtype run_uuid: str
+        :keyword parent_run_uuid: A system generated Id for the run's parent.
+        :paramtype parent_run_uuid: str
+        :keyword root_run_uuid: A system generated Id for the root of the run's hierarchy.
+        :paramtype root_run_uuid: str
+        :keyword target: The name of the compute target where the run is executed.
+        :paramtype target: str
+        :keyword status: The status of the run. The Status string value maps to the RunStatus Enum.
+        :paramtype status: str
+        :keyword parent_run_id: The parent of the run if the run is hierarchical.
+        :paramtype parent_run_id: str
+        :keyword created_time_utc: The creation time of the run in UTC.
+        :paramtype created_time_utc: ~datetime.datetime
+        :keyword start_time_utc: The start time of the run in UTC.
+        :paramtype start_time_utc: ~datetime.datetime
+        :keyword end_time_utc: The end time of the run in UTC.
+        :paramtype end_time_utc: ~datetime.datetime
+        :keyword error: The error response.
+        :paramtype error: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+        :keyword warnings: A list of warnings that occurred during the run.
+        :paramtype warnings: list[~azure.mgmt.machinelearningservices.models.RunDetailsWarning]
+        :keyword tags: A set of tags. The tag dictionary for the run. Tags are mutable.
+        :paramtype tags: dict[str, str]
+        :keyword properties: The properties dictionary for the run. Properties are immutable.
+        :paramtype properties: dict[str, str]
+        :keyword parameters: The parameters dictionary for the run. Parameters are immutable.
+        :paramtype parameters: dict[str, any]
+        :keyword services: The interactive run services for a run. Services are mutable.
+        :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.EndpointSetting]
+        :keyword input_datasets: A list of datasets used as input to the run.
+        :paramtype input_datasets: list[~azure.mgmt.machinelearningservices.models.DatasetLineage]
+        :keyword output_datasets: A list of datasets used as output of the run.
+        :paramtype output_datasets:
+         list[~azure.mgmt.machinelearningservices.models.OutputDatasetLineage]
+        :keyword run_definition: The run definition specification.
+        :paramtype run_definition: any
+        :keyword log_files: Dictionary of :code:`<string>`.
+        :paramtype log_files: dict[str, str]
+        :keyword job_cost:
+        :paramtype job_cost: ~azure.mgmt.machinelearningservices.models.JobCost
+        :keyword revision:
+        :paramtype revision: long
+        :keyword run_type_v2:
+        :paramtype run_type_v2: ~azure.mgmt.machinelearningservices.models.RunTypeV2
+        :keyword settings: The run settings.
+        :paramtype settings: dict[str, str]
+        :keyword compute_request:
+        :paramtype compute_request: ~azure.mgmt.machinelearningservices.models.ComputeRequest
+        :keyword compute:
+        :paramtype compute: ~azure.mgmt.machinelearningservices.models.Compute
+        :keyword created_by:
+        :paramtype created_by: ~azure.mgmt.machinelearningservices.models.User
+        :keyword compute_duration: Time spent in an active state for terminal runs.
+        :paramtype compute_duration: str
+        :keyword effective_start_time_utc: Relative start time of active runs for ordering and
+         computing active compute duration.
+         Compute duration of an active run is now() - EffectiveStartTimeUtc.
+        :paramtype effective_start_time_utc: ~datetime.datetime
+        :keyword run_number:
+        :paramtype run_number: int
+        :keyword root_run_id:
+        :paramtype root_run_id: str
+        :keyword user_id: The Id of the user that created the run.
+        :paramtype user_id: str
+        :keyword status_revision:
+        :paramtype status_revision: long
+        :keyword has_virtual_parent: Indicates if this is a child of a virtual run.
+        :paramtype has_virtual_parent: bool
+        :keyword current_compute_time: The cumulative time spent in an active status for an active run.
+        :paramtype current_compute_time: str
+        :keyword last_start_time_utc: The last timestamp when a run transitioned from paused to
+         running. Initialized when StartTimeUtc is first set.
+        :paramtype last_start_time_utc: ~datetime.datetime
+        :keyword last_modified_by:
+        :paramtype last_modified_by: ~azure.mgmt.machinelearningservices.models.User
+        :keyword last_modified_utc: The last modified time of the run in UTC.
+        :paramtype last_modified_utc: ~datetime.datetime
+        :keyword duration: The total duration of a run.
+        :paramtype duration: str
+        :keyword inputs: The inputs for the run.
+        :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+        :keyword outputs: The outputs for the run.
+        :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+        """
+        super(RunDetails, self).__init__(**kwargs)
+        self.run_id = kwargs.get('run_id', None)
+        self.run_uuid = kwargs.get('run_uuid', None)
+        self.parent_run_uuid = kwargs.get('parent_run_uuid', None)
+        self.root_run_uuid = kwargs.get('root_run_uuid', None)
+        self.target = kwargs.get('target', None)
+        self.status = kwargs.get('status', None)
+        self.parent_run_id = kwargs.get('parent_run_id', None)
+        self.created_time_utc = kwargs.get('created_time_utc', None)
+        self.start_time_utc = kwargs.get('start_time_utc', None)
+        self.end_time_utc = kwargs.get('end_time_utc', None)
+        self.error = kwargs.get('error', None)
+        self.warnings = kwargs.get('warnings', None)
+        self.tags = kwargs.get('tags', None)
+        self.properties = kwargs.get('properties', None)
+        self.parameters = kwargs.get('parameters', None)
+        self.services = kwargs.get('services', None)
+        self.input_datasets = kwargs.get('input_datasets', None)
+        self.output_datasets = kwargs.get('output_datasets', None)
+        self.run_definition = kwargs.get('run_definition', None)
+        self.log_files = kwargs.get('log_files', None)
+        self.job_cost = kwargs.get('job_cost', None)
+        self.revision = kwargs.get('revision', None)
+        self.run_type_v2 = kwargs.get('run_type_v2', None)
+        self.settings = kwargs.get('settings', None)
+        self.compute_request = kwargs.get('compute_request', None)
+        self.compute = kwargs.get('compute', None)
+        self.created_by = kwargs.get('created_by', None)
+        self.compute_duration = kwargs.get('compute_duration', None)
+        self.effective_start_time_utc = kwargs.get('effective_start_time_utc', None)
+        self.run_number = kwargs.get('run_number', None)
+        self.root_run_id = kwargs.get('root_run_id', None)
+        self.user_id = kwargs.get('user_id', None)
+        self.status_revision = kwargs.get('status_revision', None)
+        self.has_virtual_parent = kwargs.get('has_virtual_parent', None)
+        self.current_compute_time = kwargs.get('current_compute_time', None)
+        self.last_start_time_utc = kwargs.get('last_start_time_utc', None)
+        self.last_modified_by = kwargs.get('last_modified_by', None)
+        self.last_modified_utc = kwargs.get('last_modified_utc', None)
+        self.duration = kwargs.get('duration', None)
+        self.inputs = kwargs.get('inputs', None)
+        self.outputs = kwargs.get('outputs', None)
+
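+# Illustrative sketch (not part of the generated client): per the docstring above,
+# tags and services are mutable on a run while properties and parameters are immutable,
+# so a details payload typically echoes both kinds of dictionaries. All values shown
+# are assumed examples.
+#
+#     details = RunDetails(
+#         run_id="my-experiment_1700000000_abcd1234",
+#         status="Completed",
+#         tags={"stage": "final"},          # mutable
+#         properties={"source": "sdk"},     # immutable once set
+#         log_files={"driver_log.txt": "https://example.com/logs/driver_log.txt"},
+#     )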
+
+class RunDetailsWarning(msrest.serialization.Model):
+    """RunDetailsWarning.
+
+    :ivar source:
+    :vartype source: str
+    :ivar message:
+    :vartype message: str
+    """
+
+    _attribute_map = {
+        'source': {'key': 'source', 'type': 'str'},
+        'message': {'key': 'message', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword source:
+        :paramtype source: str
+        :keyword message:
+        :paramtype message: str
+        """
+        super(RunDetailsWarning, self).__init__(**kwargs)
+        self.source = kwargs.get('source', None)
+        self.message = kwargs.get('message', None)
+
+
+class RunMetric(msrest.serialization.Model):
+    """RunMetric.
+
+    :ivar run_id:
+    :vartype run_id: str
+    :ivar metric_id:
+    :vartype metric_id: str
+    :ivar data_container_id:
+    :vartype data_container_id: str
+    :ivar metric_type:
+    :vartype metric_type: str
+    :ivar created_utc:
+    :vartype created_utc: ~datetime.datetime
+    :ivar name:
+    :vartype name: str
+    :ivar description:
+    :vartype description: str
+    :ivar label:
+    :vartype label: str
+    :ivar num_cells:
+    :vartype num_cells: int
+    :ivar data_location:
+    :vartype data_location: str
+    :ivar cells:
+    :vartype cells: list[dict[str, any]]
+    :ivar schema:
+    :vartype schema: ~azure.mgmt.machinelearningservices.models.MetricSchema
+    """
+
+    _attribute_map = {
+        'run_id': {'key': 'runId', 'type': 'str'},
+        'metric_id': {'key': 'metricId', 'type': 'str'},
+        'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
+        'metric_type': {'key': 'metricType', 'type': 'str'},
+        'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'label': {'key': 'label', 'type': 'str'},
+        'num_cells': {'key': 'numCells', 'type': 'int'},
+        'data_location': {'key': 'dataLocation', 'type': 'str'},
+        'cells': {'key': 'cells', 'type': '[{object}]'},
+        'schema': {'key': 'schema', 'type': 'MetricSchema'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword run_id:
+        :paramtype run_id: str
+        :keyword metric_id:
+        :paramtype metric_id: str
+        :keyword data_container_id:
+        :paramtype data_container_id: str
+        :keyword metric_type:
+        :paramtype metric_type: str
+        :keyword created_utc:
+        :paramtype created_utc: ~datetime.datetime
+        :keyword name:
+        :paramtype name: str
+        :keyword description:
+        :paramtype description: str
+        :keyword label:
+        :paramtype label: str
+        :keyword num_cells:
+        :paramtype num_cells: int
+        :keyword data_location:
+        :paramtype data_location: str
+        :keyword cells:
+        :paramtype cells: list[dict[str, any]]
+        :keyword schema:
+        :paramtype schema: ~azure.mgmt.machinelearningservices.models.MetricSchema
+        """
+        super(RunMetric, self).__init__(**kwargs)
+        self.run_id = kwargs.get('run_id', None)
+        self.metric_id = kwargs.get('metric_id', None)
+        self.data_container_id = kwargs.get('data_container_id', None)
+        self.metric_type = kwargs.get('metric_type', None)
+        self.created_utc = kwargs.get('created_utc', None)
+        self.name = kwargs.get('name', None)
+        self.description = kwargs.get('description', None)
+        self.label = kwargs.get('label', None)
+        self.num_cells = kwargs.get('num_cells', None)
+        self.data_location = kwargs.get('data_location', None)
+        self.cells = kwargs.get('cells', None)
+        self.schema = kwargs.get('schema', None)
+
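+# Illustrative sketch (not part of the generated client): `cells` is a list of dict
+# rows, so a scalar metric logged over several steps can be represented as one cell
+# per value. The metric name and values are assumed examples.
+#
+#     metric = RunMetric(
+#         run_id="my-experiment_1700000000_abcd1234",
+#         name="loss",
+#         num_cells=2,
+#         cells=[{"loss": 0.42}, {"loss": 0.31}],
+#     )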
+
+class RunOptions(msrest.serialization.Model):
+    """RunOptions.
+
+    :ivar generate_data_container_id_if_not_specified:
+    :vartype generate_data_container_id_if_not_specified: bool
+    """
+
+    _attribute_map = {
+        'generate_data_container_id_if_not_specified': {'key': 'generateDataContainerIdIfNotSpecified', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword generate_data_container_id_if_not_specified:
+        :paramtype generate_data_container_id_if_not_specified: bool
+        """
+        super(RunOptions, self).__init__(**kwargs)
+        self.generate_data_container_id_if_not_specified = kwargs.get('generate_data_container_id_if_not_specified', None)
+
+
+class RunServiceInstances(msrest.serialization.Model):
+    """RunServiceInstances.
+
+    :ivar instances: Dictionary of :code:`<ServiceInstanceResult>`.
+    :vartype instances: dict[str, ~azure.mgmt.machinelearningservices.models.ServiceInstanceResult]
+    """
+
+    _attribute_map = {
+        'instances': {'key': 'instances', 'type': '{ServiceInstanceResult}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword instances: Dictionary of :code:`<ServiceInstanceResult>`.
+        :paramtype instances: dict[str,
+         ~azure.mgmt.machinelearningservices.models.ServiceInstanceResult]
+        """
+        super(RunServiceInstances, self).__init__(**kwargs)
+        self.instances = kwargs.get('instances', None)
+
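+# Illustrative sketch (not part of the generated client): the `instances` dictionary
+# maps a service name to a ServiceInstanceResult (defined later in this module).
+# The service name, endpoint, and port are assumed example values.
+#
+#     instances = RunServiceInstances(
+#         instances={
+#             "tensorboard": ServiceInstanceResult(
+#                 type="tensorboard",
+#                 port=6006,
+#                 status="Running",
+#                 endpoint="https://example.com/tensorboard",
+#             )
+#         }
+#     )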
+
+class RunStatusSpans(msrest.serialization.Model):
+    """RunStatusSpans.
+
+    :ivar spans:
+    :vartype spans: list[~azure.mgmt.machinelearningservices.models.SpanDefinition1]
+    """
+
+    _attribute_map = {
+        'spans': {'key': 'spans', 'type': '[SpanDefinition1]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword spans:
+        :paramtype spans: list[~azure.mgmt.machinelearningservices.models.SpanDefinition1]
+        """
+        super(RunStatusSpans, self).__init__(**kwargs)
+        self.spans = kwargs.get('spans', None)
+
+
+class RunTypeV2(msrest.serialization.Model):
+    """RunTypeV2.
+
+    :ivar orchestrator:
+    :vartype orchestrator: str
+    :ivar traits:
+    :vartype traits: list[str]
+    :ivar attribution:
+    :vartype attribution: str
+    :ivar compute_type:
+    :vartype compute_type: str
+    """
+
+    _validation = {
+        'traits': {'unique': True},
+    }
+
+    _attribute_map = {
+        'orchestrator': {'key': 'orchestrator', 'type': 'str'},
+        'traits': {'key': 'traits', 'type': '[str]'},
+        'attribution': {'key': 'attribution', 'type': 'str'},
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword orchestrator:
+        :paramtype orchestrator: str
+        :keyword traits:
+        :paramtype traits: list[str]
+        :keyword attribution:
+        :paramtype attribution: str
+        :keyword compute_type:
+        :paramtype compute_type: str
+        """
+        super(RunTypeV2, self).__init__(**kwargs)
+        self.orchestrator = kwargs.get('orchestrator', None)
+        self.traits = kwargs.get('traits', None)
+        self.attribution = kwargs.get('attribution', None)
+        self.compute_type = kwargs.get('compute_type', None)
+
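+# Illustrative sketch (not part of the generated client): `traits` is validated as a
+# unique list (see _validation above), so callers should not pass duplicates. The
+# string values are assumed examples.
+#
+#     run_type = RunTypeV2(
+#         orchestrator="Command",
+#         traits=["example-trait"],      # validated as unique; no duplicates
+#         compute_type="AmlCompute",
+#     )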
+
+class ServiceInstance(msrest.serialization.Model):
+    """ServiceInstance.
+
+    :ivar is_single_node:
+    :vartype is_single_node: bool
+    :ivar error_message:
+    :vartype error_message: str
+    :ivar port:
+    :vartype port: int
+    :ivar status:
+    :vartype status: str
+    :ivar error: The error response.
+    :vartype error: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+    :ivar properties: Dictionary of :code:`<string>`.
+    :vartype properties: dict[str, str]
+    """
+
+    _attribute_map = {
+        'is_single_node': {'key': 'isSingleNode', 'type': 'bool'},
+        'error_message': {'key': 'errorMessage', 'type': 'str'},
+        'port': {'key': 'port', 'type': 'int'},
+        'status': {'key': 'status', 'type': 'str'},
+        'error': {'key': 'error', 'type': 'ErrorResponse'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword is_single_node:
+        :paramtype is_single_node: bool
+        :keyword error_message:
+        :paramtype error_message: str
+        :keyword port:
+        :paramtype port: int
+        :keyword status:
+        :paramtype status: str
+        :keyword error: The error response.
+        :paramtype error: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+        :keyword properties: Dictionary of :code:`<string>`.
+        :paramtype properties: dict[str, str]
+        """
+        super(ServiceInstance, self).__init__(**kwargs)
+        self.is_single_node = kwargs.get('is_single_node', None)
+        self.error_message = kwargs.get('error_message', None)
+        self.port = kwargs.get('port', None)
+        self.status = kwargs.get('status', None)
+        self.error = kwargs.get('error', None)
+        self.properties = kwargs.get('properties', None)
+
+
+class ServiceInstanceResult(msrest.serialization.Model):
+    """ServiceInstanceResult.
+
+    :ivar type:
+    :vartype type: str
+    :ivar port:
+    :vartype port: int
+    :ivar status:
+    :vartype status: str
+    :ivar error: The error response.
+    :vartype error: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+    :ivar endpoint:
+    :vartype endpoint: str
+    :ivar properties: Dictionary of :code:`<string>`.
+    :vartype properties: dict[str, str]
+    """
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'port': {'key': 'port', 'type': 'int'},
+        'status': {'key': 'status', 'type': 'str'},
+        'error': {'key': 'error', 'type': 'ErrorResponse'},
+        'endpoint': {'key': 'endpoint', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword type:
+        :paramtype type: str
+        :keyword port:
+        :paramtype port: int
+        :keyword status:
+        :paramtype status: str
+        :keyword error: The error response.
+        :paramtype error: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+        :keyword endpoint:
+        :paramtype endpoint: str
+        :keyword properties: Dictionary of :code:`<string>`.
+        :paramtype properties: dict[str, str]
+        """
+        super(ServiceInstanceResult, self).__init__(**kwargs)
+        self.type = kwargs.get('type', None)
+        self.port = kwargs.get('port', None)
+        self.status = kwargs.get('status', None)
+        self.error = kwargs.get('error', None)
+        self.endpoint = kwargs.get('endpoint', None)
+        self.properties = kwargs.get('properties', None)
+
+
+class SpanContext(msrest.serialization.Model):
+    """SpanContext.
+
+    :ivar trace_id: Gets the TraceId associated with this
+     Microsoft.MachineLearning.RunHistory.Contracts.SpanContext.
+     TODO: In the actual spec this is an ActivityTraceId, but that causes problems in
+     serialization/deserialization.
+    :vartype trace_id: str
+    :ivar span_id: Gets the SpanId associated with this
+     Microsoft.MachineLearning.RunHistory.Contracts.SpanContext.
+     TODO: In the actual spec this is an ActivitySpanId, but that causes problems in
+     serialization/deserialization.
+    :vartype span_id: str
+    :ivar is_remote: Gets a value indicating whether this
+     Microsoft.MachineLearning.RunHistory.Contracts.SpanContext
+     was propagated from a remote parent.
+    :vartype is_remote: bool
+    :ivar is_valid: Gets a value indicating whether this
+     Microsoft.MachineLearning.RunHistory.Contracts.SpanContext is valid.
+    :vartype is_valid: bool
+    :ivar tracestate: Gets the
+     Microsoft.MachineLearning.RunHistory.Contracts.SpanContext.Tracestate associated with this
+     Microsoft.MachineLearning.RunHistory.Contracts.SpanContext.
+    :vartype tracestate: list[~azure.mgmt.machinelearningservices.models.KeyValuePairString]
+    """
+
+    _attribute_map = {
+        'trace_id': {'key': 'traceId', 'type': 'str'},
+        'span_id': {'key': 'spanId', 'type': 'str'},
+        'is_remote': {'key': 'isRemote', 'type': 'bool'},
+        'is_valid': {'key': 'isValid', 'type': 'bool'},
+        'tracestate': {'key': 'tracestate', 'type': '[KeyValuePairString]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword trace_id: Gets the TraceId associated with this
+         Microsoft.MachineLearning.RunHistory.Contracts.SpanContext.
+         TODO: In actual spec, it is ActivityTraceId type. But that causes problems in
+         serialization/deserialization.
+        :paramtype trace_id: str
+        :keyword span_id: Gets the SpanId associated with this
+         Microsoft.MachineLearning.RunHistory.Contracts.SpanContext.
+         TODO: In actual spec, it is ActivitySpanId type. But that causes problems in
+         serialization/deserialization.
+        :paramtype span_id: str
+        :keyword is_remote: Gets a value indicating whether this
+         Microsoft.MachineLearning.RunHistory.Contracts.SpanContext
+         was propagated from a remote parent.
+        :paramtype is_remote: bool
+        :keyword is_valid: Gets a value indicating whether this
+         Microsoft.MachineLearning.RunHistory.Contracts.SpanContext is valid.
+        :paramtype is_valid: bool
+        :keyword tracestate: Gets the
+         Microsoft.MachineLearning.RunHistory.Contracts.SpanContext.Tracestate associated with this
+         Microsoft.MachineLearning.RunHistory.Contracts.SpanContext.
+        :paramtype tracestate: list[~azure.mgmt.machinelearningservices.models.KeyValuePairString]
+        """
+        super(SpanContext, self).__init__(**kwargs)
+        self.trace_id = kwargs.get('trace_id', None)
+        self.span_id = kwargs.get('span_id', None)
+        self.is_remote = kwargs.get('is_remote', None)
+        self.is_valid = kwargs.get('is_valid', None)
+        self.tracestate = kwargs.get('tracestate', None)
+
+
+class SpanDefinition1(msrest.serialization.Model):
+    """Most of the code in this class is vendored from here.
+https://github.com/open-telemetry/opentelemetry-dotnet/blob/master/src/OpenTelemetry/Trace/Export/SpanData.cs
+SpanData on that github link is readonly, we can't set properties on it after creation. So, just vendoring the Span
+contract.
+TStatus is the status enum. For runs, it is RunStatus
+This is the link for span spec https://github.com/open-telemetry/opentelemetry-specification/blob/master/specification/overview.md#span.
+
+    :ivar context:
+    :vartype context: ~azure.mgmt.machinelearningservices.models.SpanContext
+    :ivar name: Gets span name.
+    :vartype name: str
+    :ivar status: Gets span status.
+     OpenTelemetry sets it to
+     https://github.com/open-telemetry/opentelemetry-dotnet/blob/master/src/OpenTelemetry.Api/Trace/Status.cs
+     Those status enums are not very meaningful to us, so we customize this. Possible values
+     include: "NotStarted", "Unapproved", "Pausing", "Paused", "Starting", "Preparing", "Queued",
+     "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled".
+    :vartype status: str or ~azure.mgmt.machinelearningservices.models.RunStatus
+    :ivar parent_span_id: Gets parent span id.
+     TODO: In actual spec, it is ActivitySpanId type. But that causes problems in
+     serialization/deserialization.
+    :vartype parent_span_id: str
+    :ivar attributes: Gets attributes.
+    :vartype attributes: list[~azure.mgmt.machinelearningservices.models.KeyValuePairStringJToken]
+    :ivar events: Gets events.
+    :vartype events: list[~azure.mgmt.machinelearningservices.models.Event]
+    :ivar links: Gets links.
+    :vartype links: list[~azure.mgmt.machinelearningservices.models.Link]
+    :ivar start_timestamp: Gets span start timestamp.
+    :vartype start_timestamp: ~datetime.datetime
+    :ivar end_timestamp: Gets span end timestamp.
+    :vartype end_timestamp: ~datetime.datetime
+    """
+
+    _attribute_map = {
+        'context': {'key': 'context', 'type': 'SpanContext'},
+        'name': {'key': 'name', 'type': 'str'},
+        'status': {'key': 'status', 'type': 'str'},
+        'parent_span_id': {'key': 'parentSpanId', 'type': 'str'},
+        'attributes': {'key': 'attributes', 'type': '[KeyValuePairStringJToken]'},
+        'events': {'key': 'events', 'type': '[Event]'},
+        'links': {'key': 'links', 'type': '[Link]'},
+        'start_timestamp': {'key': 'startTimestamp', 'type': 'iso-8601'},
+        'end_timestamp': {'key': 'endTimestamp', 'type': 'iso-8601'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword context:
+        :paramtype context: ~azure.mgmt.machinelearningservices.models.SpanContext
+        :keyword name: Gets span name.
+        :paramtype name: str
+        :keyword status: Gets span status.
+         OpenTelemetry sets it to
+         https://github.com/open-telemetry/opentelemetry-dotnet/blob/master/src/OpenTelemetry.Api/Trace/Status.cs
+         Those status enums are not very meaningful to us, so we customize this. Possible values
+         include: "NotStarted", "Unapproved", "Pausing", "Paused", "Starting", "Preparing", "Queued",
+         "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled".
+        :paramtype status: str or ~azure.mgmt.machinelearningservices.models.RunStatus
+        :keyword parent_span_id: Gets parent span id.
+         TODO: In actual spec, it is ActivitySpanId type. But that causes problems in
+         serialization/deserialization.
+        :paramtype parent_span_id: str
+        :keyword attributes: Gets attributes.
+        :paramtype attributes:
+         list[~azure.mgmt.machinelearningservices.models.KeyValuePairStringJToken]
+        :keyword events: Gets events.
+        :paramtype events: list[~azure.mgmt.machinelearningservices.models.Event]
+        :keyword links: Gets links.
+        :paramtype links: list[~azure.mgmt.machinelearningservices.models.Link]
+        :keyword start_timestamp: Gets span start timestamp.
+        :paramtype start_timestamp: ~datetime.datetime
+        :keyword end_timestamp: Gets span end timestamp.
+        :paramtype end_timestamp: ~datetime.datetime
+        """
+        super(SpanDefinition1, self).__init__(**kwargs)
+        self.context = kwargs.get('context', None)
+        self.name = kwargs.get('name', None)
+        self.status = kwargs.get('status', None)
+        self.parent_span_id = kwargs.get('parent_span_id', None)
+        self.attributes = kwargs.get('attributes', None)
+        self.events = kwargs.get('events', None)
+        self.links = kwargs.get('links', None)
+        self.start_timestamp = kwargs.get('start_timestamp', None)
+        self.end_timestamp = kwargs.get('end_timestamp', None)
+
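For orientation, a minimal usage sketch of the two span models above, assuming they are re-exported from the package's models namespace as the generated __init__.py normally does (the kwargs-based constructors simply leave any omitted field as None):

    from azure.ai.ml._restclient.runhistory.models import SpanContext, SpanDefinition1

    # Hypothetical trace/span ids; any field not passed stays None.
    ctx = SpanContext(
        trace_id="4bf92f3577b34da6a3ce929d0e0e4736",
        span_id="00f067aa0ba902b7",
        is_remote=False,
    )
    span = SpanDefinition1(context=ctx, name="train-step", status="Completed")
    assert span.parent_span_id is None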
+
+class SqlDataPath(msrest.serialization.Model):
+    """SqlDataPath.
+
+    :ivar sql_table_name:
+    :vartype sql_table_name: str
+    :ivar sql_query:
+    :vartype sql_query: str
+    :ivar sql_stored_procedure_name:
+    :vartype sql_stored_procedure_name: str
+    :ivar sql_stored_procedure_params:
+    :vartype sql_stored_procedure_params:
+     list[~azure.mgmt.machinelearningservices.models.StoredProcedureParameter]
+    """
+
+    _attribute_map = {
+        'sql_table_name': {'key': 'sqlTableName', 'type': 'str'},
+        'sql_query': {'key': 'sqlQuery', 'type': 'str'},
+        'sql_stored_procedure_name': {'key': 'sqlStoredProcedureName', 'type': 'str'},
+        'sql_stored_procedure_params': {'key': 'sqlStoredProcedureParams', 'type': '[StoredProcedureParameter]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword sql_table_name:
+        :paramtype sql_table_name: str
+        :keyword sql_query:
+        :paramtype sql_query: str
+        :keyword sql_stored_procedure_name:
+        :paramtype sql_stored_procedure_name: str
+        :keyword sql_stored_procedure_params:
+        :paramtype sql_stored_procedure_params:
+         list[~azure.mgmt.machinelearningservices.models.StoredProcedureParameter]
+        """
+        super(SqlDataPath, self).__init__(**kwargs)
+        self.sql_table_name = kwargs.get('sql_table_name', None)
+        self.sql_query = kwargs.get('sql_query', None)
+        self.sql_stored_procedure_name = kwargs.get('sql_stored_procedure_name', None)
+        self.sql_stored_procedure_params = kwargs.get('sql_stored_procedure_params', None)
+
+
+class StoredProcedureParameter(msrest.serialization.Model):
+    """StoredProcedureParameter.
+
+    :ivar name:
+    :vartype name: str
+    :ivar value:
+    :vartype value: str
+    :ivar type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date".
+    :vartype type: str or ~azure.mgmt.machinelearningservices.models.StoredProcedureParameterType
+    """
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'value': {'key': 'value', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword name:
+        :paramtype name: str
+        :keyword value:
+        :paramtype value: str
+        :keyword type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date".
+        :paramtype type: str or ~azure.mgmt.machinelearningservices.models.StoredProcedureParameterType
+        """
+        super(StoredProcedureParameter, self).__init__(**kwargs)
+        self.name = kwargs.get('name', None)
+        self.value = kwargs.get('value', None)
+        self.type = kwargs.get('type', None)
+
+
+class TypedAssetReference(msrest.serialization.Model):
+    """TypedAssetReference.
+
+    :ivar asset_id:
+    :vartype asset_id: str
+    :ivar type:
+    :vartype type: str
+    """
+
+    _attribute_map = {
+        'asset_id': {'key': 'assetId', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword asset_id:
+        :paramtype asset_id: str
+        :keyword type:
+        :paramtype type: str
+        """
+        super(TypedAssetReference, self).__init__(**kwargs)
+        self.asset_id = kwargs.get('asset_id', None)
+        self.type = kwargs.get('type', None)
+
+
+class User(msrest.serialization.Model):
+    """User.
+
+    :ivar user_object_id: A user or service principal's object ID.
+     This is EUPI and may only be logged to warm path telemetry.
+    :vartype user_object_id: str
+    :ivar user_pu_id: A user or service principal's PuID.
+     This is PII and should never be logged.
+    :vartype user_pu_id: str
+    :ivar user_idp: A user identity provider, e.g. live.com.
+     This is PII and should never be logged.
+    :vartype user_idp: str
+    :ivar user_alt_sec_id: A user alternate sec id. This represents the user in a different
+     identity provider system, e.g. 1:live.com:puid.
+     This is PII and should never be logged.
+    :vartype user_alt_sec_id: str
+    :ivar user_iss: The issuer which issued the token for this user.
+     This is PII and should never be logged.
+    :vartype user_iss: str
+    :ivar user_tenant_id: A user or service principal's tenant ID.
+    :vartype user_tenant_id: str
+    :ivar user_name: A user's full name or a service principal's app ID.
+     This is PII and should never be logged.
+    :vartype user_name: str
+    :ivar upn: A user's principal name (UPN).
+     This is PII and should never be logged.
+    :vartype upn: str
+    """
+
+    _attribute_map = {
+        'user_object_id': {'key': 'userObjectId', 'type': 'str'},
+        'user_pu_id': {'key': 'userPuId', 'type': 'str'},
+        'user_idp': {'key': 'userIdp', 'type': 'str'},
+        'user_alt_sec_id': {'key': 'userAltSecId', 'type': 'str'},
+        'user_iss': {'key': 'userIss', 'type': 'str'},
+        'user_tenant_id': {'key': 'userTenantId', 'type': 'str'},
+        'user_name': {'key': 'userName', 'type': 'str'},
+        'upn': {'key': 'upn', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        """
+        :keyword user_object_id: A user or service principal's object ID.
+         This is EUPI and may only be logged to warm path telemetry.
+        :paramtype user_object_id: str
+        :keyword user_pu_id: A user or service principal's PuID.
+         This is PII and should never be logged.
+        :paramtype user_pu_id: str
+        :keyword user_idp: A user identity provider, e.g. live.com.
+         This is PII and should never be logged.
+        :paramtype user_idp: str
+        :keyword user_alt_sec_id: A user alternate sec id. This represents the user in a different
+         identity provider system, e.g. 1:live.com:puid.
+         This is PII and should never be logged.
+        :paramtype user_alt_sec_id: str
+        :keyword user_iss: The issuer which issued the token for this user.
+         This is PII and should never be logged.
+        :paramtype user_iss: str
+        :keyword user_tenant_id: A user or service principal's tenant ID.
+        :paramtype user_tenant_id: str
+        :keyword user_name: A user's full name or a service principal's app ID.
+         This is PII and should never be logged.
+        :paramtype user_name: str
+        :keyword upn: A user's principal name (UPN).
+         This is PII and should never be logged.
+        :paramtype upn: str
+        """
+        super(User, self).__init__(**kwargs)
+        self.user_object_id = kwargs.get('user_object_id', None)
+        self.user_pu_id = kwargs.get('user_pu_id', None)
+        self.user_idp = kwargs.get('user_idp', None)
+        self.user_alt_sec_id = kwargs.get('user_alt_sec_id', None)
+        self.user_iss = kwargs.get('user_iss', None)
+        self.user_tenant_id = kwargs.get('user_tenant_id', None)
+        self.user_name = kwargs.get('user_name', None)
+        self.upn = kwargs.get('upn', None)
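All of the kwargs-based models in this file share the same shape, so a short, hedged round-trip sketch using the User class above is representative (serialize/deserialize are inherited from msrest.serialization.Model and use the camelCase keys from _attribute_map; ids here are placeholders):

    from azure.ai.ml._restclient.runhistory.models import User

    u = User(user_object_id="00000000-0000-0000-0000-000000000000",
             user_tenant_id="00000000-0000-0000-0000-000000000000")
    wire = u.serialize()            # {'userObjectId': ..., 'userTenantId': ...}
    restored = User.deserialize(wire)
    assert restored.user_tenant_id == u.user_tenant_id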
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/models/_models_py3.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/models/_models_py3.py
new file mode 100644
index 00000000..4b482905
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/models/_models_py3.py
@@ -0,0 +1,4854 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+import datetime
+from typing import Any, Dict, List, Optional, Union
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+from ._azure_machine_learning_workspaces_enums import *
+
+
+class AddOrModifyRunServiceInstancesRequest(msrest.serialization.Model):
+    """AddOrModifyRunServiceInstancesRequest.
+
+    :ivar instances: Dictionary of :code:`<ServiceInstance>`.
+    :vartype instances: dict[str, ~azure.mgmt.machinelearningservices.models.ServiceInstance]
+    """
+
+    _attribute_map = {
+        'instances': {'key': 'instances', 'type': '{ServiceInstance}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        instances: Optional[Dict[str, "ServiceInstance"]] = None,
+        **kwargs
+    ):
+        """
+        :keyword instances: Dictionary of :code:`<ServiceInstance>`.
+        :paramtype instances: dict[str, ~azure.mgmt.machinelearningservices.models.ServiceInstance]
+        """
+        super(AddOrModifyRunServiceInstancesRequest, self).__init__(**kwargs)
+        self.instances = instances
+
+
+class Artifact(msrest.serialization.Model):
+    """Details of an Artifact.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar artifact_id: The identifier of an Artifact. Format of ArtifactId -
+     {Origin}/{Container}/{Path}.
+    :vartype artifact_id: str
+    :ivar origin: Required. The origin of the Artifact creation request. Available origins are
+     'ExperimentRun', 'LocalUpload', 'WebUpload', 'Dataset' and 'Unknown'.
+    :vartype origin: str
+    :ivar container: Required. The name of container. Artifacts can be grouped by container.
+    :vartype container: str
+    :ivar path: Required. The path to the Artifact in a container.
+    :vartype path: str
+    :ivar etag: The Etag of the Artifact.
+    :vartype etag: str
+    :ivar created_time: The Date and Time at which the Artifact is created. The DateTime is in UTC.
+    :vartype created_time: ~datetime.datetime
+    :ivar data_path:
+    :vartype data_path: ~azure.mgmt.machinelearningservices.models.ArtifactDataPath
+    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
+    :vartype tags: dict[str, str]
+    """
+
+    _validation = {
+        'origin': {'required': True},
+        'container': {'required': True},
+        'path': {'required': True},
+    }
+
+    _attribute_map = {
+        'artifact_id': {'key': 'artifactId', 'type': 'str'},
+        'origin': {'key': 'origin', 'type': 'str'},
+        'container': {'key': 'container', 'type': 'str'},
+        'path': {'key': 'path', 'type': 'str'},
+        'etag': {'key': 'etag', 'type': 'str'},
+        'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
+        'data_path': {'key': 'dataPath', 'type': 'ArtifactDataPath'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        origin: str,
+        container: str,
+        path: str,
+        artifact_id: Optional[str] = None,
+        etag: Optional[str] = None,
+        created_time: Optional[datetime.datetime] = None,
+        data_path: Optional["ArtifactDataPath"] = None,
+        tags: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        """
+        :keyword artifact_id: The identifier of an Artifact. Format of ArtifactId -
+         {Origin}/{Container}/{Path}.
+        :paramtype artifact_id: str
+        :keyword origin: Required. The origin of the Artifact creation request. Available origins are
+         'ExperimentRun', 'LocalUpload', 'WebUpload', 'Dataset' and 'Unknown'.
+        :paramtype origin: str
+        :keyword container: Required. The name of container. Artifacts can be grouped by container.
+        :paramtype container: str
+        :keyword path: Required. The path to the Artifact in a container.
+        :paramtype path: str
+        :keyword etag: The Etag of the Artifact.
+        :paramtype etag: str
+        :keyword created_time: The Date and Time at which the Artifact is created. The DateTime is in
+         UTC.
+        :paramtype created_time: ~datetime.datetime
+        :keyword data_path:
+        :paramtype data_path: ~azure.mgmt.machinelearningservices.models.ArtifactDataPath
+        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
+        :paramtype tags: dict[str, str]
+        """
+        super(Artifact, self).__init__(**kwargs)
+        self.artifact_id = artifact_id
+        self.origin = origin
+        self.container = container
+        self.path = path
+        self.etag = etag
+        self.created_time = created_time
+        self.data_path = data_path
+        self.tags = tags
+
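Unlike the kwargs-only variants earlier in the diff, these *_py3 constructors are keyword-only and type-hinted, so the fields marked required in _validation must be passed explicitly. A minimal sketch with placeholder values:

    from azure.ai.ml._restclient.runhistory.models import Artifact

    artifact = Artifact(
        origin="ExperimentRun",         # required
        container="example-container",  # required (placeholder name)
        path="outputs/model.pkl",       # required (placeholder path)
        tags={"stage": "training"},
    )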
+
+class ArtifactContentInformation(msrest.serialization.Model):
+    """Details of an Artifact Content Information.
+
+    :ivar content_uri: The URI of the content.
+    :vartype content_uri: str
+    :ivar origin: The origin of the Artifact creation request. Available origins are
+     'ExperimentRun', 'LocalUpload', 'WebUpload', 'Dataset', 'ComputeRecord', 'Metric', and
+     'Unknown'.
+    :vartype origin: str
+    :ivar container: The name of container. Artifacts can be grouped by container.
+    :vartype container: str
+    :ivar path: The path to the Artifact in a container.
+    :vartype path: str
+    :ivar tags: A set of tags. The tags on the artifact.
+    :vartype tags: dict[str, str]
+    """
+
+    _attribute_map = {
+        'content_uri': {'key': 'contentUri', 'type': 'str'},
+        'origin': {'key': 'origin', 'type': 'str'},
+        'container': {'key': 'container', 'type': 'str'},
+        'path': {'key': 'path', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        content_uri: Optional[str] = None,
+        origin: Optional[str] = None,
+        container: Optional[str] = None,
+        path: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        """
+        :keyword content_uri: The URI of the content.
+        :paramtype content_uri: str
+        :keyword origin: The origin of the Artifact creation request. Available origins are
+         'ExperimentRun', 'LocalUpload', 'WebUpload', 'Dataset', 'ComputeRecord', 'Metric', and
+         'Unknown'.
+        :paramtype origin: str
+        :keyword container: The name of container. Artifacts can be grouped by container.
+        :paramtype container: str
+        :keyword path: The path to the Artifact in a container.
+        :paramtype path: str
+        :keyword tags: A set of tags. The tags on the artifact.
+        :paramtype tags: dict[str, str]
+        """
+        super(ArtifactContentInformation, self).__init__(**kwargs)
+        self.content_uri = content_uri
+        self.origin = origin
+        self.container = container
+        self.path = path
+        self.tags = tags
+
+
+class ArtifactDataPath(msrest.serialization.Model):
+    """ArtifactDataPath.
+
+    :ivar data_store_name:
+    :vartype data_store_name: str
+    :ivar relative_path:
+    :vartype relative_path: str
+    :ivar sql_data_path:
+    :vartype sql_data_path: ~azure.mgmt.machinelearningservices.models.SqlDataPath
+    """
+
+    _attribute_map = {
+        'data_store_name': {'key': 'dataStoreName', 'type': 'str'},
+        'relative_path': {'key': 'relativePath', 'type': 'str'},
+        'sql_data_path': {'key': 'sqlDataPath', 'type': 'SqlDataPath'},
+    }
+
+    def __init__(
+        self,
+        *,
+        data_store_name: Optional[str] = None,
+        relative_path: Optional[str] = None,
+        sql_data_path: Optional["SqlDataPath"] = None,
+        **kwargs
+    ):
+        """
+        :keyword data_store_name:
+        :paramtype data_store_name: str
+        :keyword relative_path:
+        :paramtype relative_path: str
+        :keyword sql_data_path:
+        :paramtype sql_data_path: ~azure.mgmt.machinelearningservices.models.SqlDataPath
+        """
+        super(ArtifactDataPath, self).__init__(**kwargs)
+        self.data_store_name = data_store_name
+        self.relative_path = relative_path
+        self.sql_data_path = sql_data_path
+
+
+class ArtifactPath(msrest.serialization.Model):
+    """Details of an Artifact Path.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar path: Required. The path to the Artifact in a container.
+    :vartype path: str
+    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
+    :vartype tags: dict[str, str]
+    """
+
+    _validation = {
+        'path': {'required': True},
+    }
+
+    _attribute_map = {
+        'path': {'key': 'path', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        path: str,
+        tags: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        """
+        :keyword path: Required. The path to the Artifact in a container.
+        :paramtype path: str
+        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
+        :paramtype tags: dict[str, str]
+        """
+        super(ArtifactPath, self).__init__(**kwargs)
+        self.path = path
+        self.tags = tags
+
+
+class ArtifactPathList(msrest.serialization.Model):
+    """Contains list of Artifact Paths.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar paths: Required. List of Artifact Paths.
+    :vartype paths: list[~azure.mgmt.machinelearningservices.models.ArtifactPath]
+    """
+
+    _validation = {
+        'paths': {'required': True},
+    }
+
+    _attribute_map = {
+        'paths': {'key': 'paths', 'type': '[ArtifactPath]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        paths: List["ArtifactPath"],
+        **kwargs
+    ):
+        """
+        :keyword paths: Required. List of Artifact Paths.
+        :paramtype paths: list[~azure.mgmt.machinelearningservices.models.ArtifactPath]
+        """
+        super(ArtifactPathList, self).__init__(**kwargs)
+        self.paths = paths
+
+
+class BaseEvent(msrest.serialization.Model):
+    """Base event is the envelope used to post event data to the Event controller.
+
+    :ivar timestamp:
+    :vartype timestamp: ~datetime.datetime
+    :ivar name:
+    :vartype name: str
+    :ivar data: Anything.
+    :vartype data: any
+    """
+
+    _attribute_map = {
+        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
+        'name': {'key': 'name', 'type': 'str'},
+        'data': {'key': 'data', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        *,
+        timestamp: Optional[datetime.datetime] = None,
+        name: Optional[str] = None,
+        data: Optional[Any] = None,
+        **kwargs
+    ):
+        """
+        :keyword timestamp:
+        :paramtype timestamp: ~datetime.datetime
+        :keyword name:
+        :paramtype name: str
+        :keyword data: Anything.
+        :paramtype data: any
+        """
+        super(BaseEvent, self).__init__(**kwargs)
+        self.timestamp = timestamp
+        self.name = name
+        self.data = data
+
+
+class BatchAddOrModifyRunRequest(msrest.serialization.Model):
+    """BatchAddOrModifyRunRequest.
+
+    :ivar runs:
+    :vartype runs: list[~azure.mgmt.machinelearningservices.models.CreateRun]
+    """
+
+    _attribute_map = {
+        'runs': {'key': 'runs', 'type': '[CreateRun]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        runs: Optional[List["CreateRun"]] = None,
+        **kwargs
+    ):
+        """
+        :keyword runs:
+        :paramtype runs: list[~azure.mgmt.machinelearningservices.models.CreateRun]
+        """
+        super(BatchAddOrModifyRunRequest, self).__init__(**kwargs)
+        self.runs = runs
+
+
+class BatchArtifactContentInformationResult(msrest.serialization.Model):
+    """Results of the Batch Artifact Content Information request.
+
+    :ivar artifacts: Artifact details of the Artifact Ids requested.
+    :vartype artifacts: dict[str, ~azure.mgmt.machinelearningservices.models.Artifact]
+    :ivar artifact_content_information: Artifact Content Information details of the Artifact Ids
+     requested.
+    :vartype artifact_content_information: dict[str,
+     ~azure.mgmt.machinelearningservices.models.ArtifactContentInformation]
+    :ivar errors: Errors that occurred while fetching the requested Artifact Ids.
+    :vartype errors: dict[str, ~azure.mgmt.machinelearningservices.models.ErrorResponse]
+    """
+
+    _attribute_map = {
+        'artifacts': {'key': 'artifacts', 'type': '{Artifact}'},
+        'artifact_content_information': {'key': 'artifactContentInformation', 'type': '{ArtifactContentInformation}'},
+        'errors': {'key': 'errors', 'type': '{ErrorResponse}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        artifacts: Optional[Dict[str, "Artifact"]] = None,
+        artifact_content_information: Optional[Dict[str, "ArtifactContentInformation"]] = None,
+        errors: Optional[Dict[str, "ErrorResponse"]] = None,
+        **kwargs
+    ):
+        """
+        :keyword artifacts: Artifact details of the Artifact Ids requested.
+        :paramtype artifacts: dict[str, ~azure.mgmt.machinelearningservices.models.Artifact]
+        :keyword artifact_content_information: Artifact Content Information details of the Artifact Ids
+         requested.
+        :paramtype artifact_content_information: dict[str,
+         ~azure.mgmt.machinelearningservices.models.ArtifactContentInformation]
+        :keyword errors: Errors that occurred while fetching the requested Artifact Ids.
+        :paramtype errors: dict[str, ~azure.mgmt.machinelearningservices.models.ErrorResponse]
+        """
+        super(BatchArtifactContentInformationResult, self).__init__(**kwargs)
+        self.artifacts = artifacts
+        self.artifact_content_information = artifact_content_information
+        self.errors = errors
+
+
+class BatchEventCommand(msrest.serialization.Model):
+    """BatchEventCommand.
+
+    :ivar events:
+    :vartype events: list[~azure.mgmt.machinelearningservices.models.BaseEvent]
+    """
+
+    _attribute_map = {
+        'events': {'key': 'events', 'type': '[BaseEvent]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        events: Optional[List["BaseEvent"]] = None,
+        **kwargs
+    ):
+        """
+        :keyword events:
+        :paramtype events: list[~azure.mgmt.machinelearningservices.models.BaseEvent]
+        """
+        super(BatchEventCommand, self).__init__(**kwargs)
+        self.events = events
+
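BaseEvent is just an envelope (timestamp, name, free-form data) and BatchEventCommand groups a list of them; a minimal sketch, with a made-up event name:

    import datetime

    from azure.ai.ml._restclient.runhistory.models import BaseEvent, BatchEventCommand

    event = BaseEvent(
        timestamp=datetime.datetime.now(datetime.timezone.utc),
        name="ExampleEvent",                    # hypothetical event name
        data={"note": "illustrative payload"},
    )
    command = BatchEventCommand(events=[event])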
+
+class BatchEventCommandResult(msrest.serialization.Model):
+    """BatchEventCommandResult.
+
+    :ivar errors:
+    :vartype errors:
+     list[~azure.mgmt.machinelearningservices.models.KeyValuePairBaseEventErrorResponse]
+    :ivar successes:
+    :vartype successes: list[str]
+    """
+
+    _attribute_map = {
+        'errors': {'key': 'errors', 'type': '[KeyValuePairBaseEventErrorResponse]'},
+        'successes': {'key': 'successes', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        errors: Optional[List["KeyValuePairBaseEventErrorResponse"]] = None,
+        successes: Optional[List[str]] = None,
+        **kwargs
+    ):
+        """
+        :keyword errors:
+        :paramtype errors:
+         list[~azure.mgmt.machinelearningservices.models.KeyValuePairBaseEventErrorResponse]
+        :keyword successes:
+        :paramtype successes: list[str]
+        """
+        super(BatchEventCommandResult, self).__init__(**kwargs)
+        self.errors = errors
+        self.successes = successes
+
+
+class BatchIMetricV2(msrest.serialization.Model):
+    """BatchIMetricV2.
+
+    :ivar values:
+    :vartype values: list[~azure.mgmt.machinelearningservices.models.IMetricV2]
+    :ivar report_errors:
+    :vartype report_errors: bool
+    """
+
+    _attribute_map = {
+        'values': {'key': 'values', 'type': '[IMetricV2]'},
+        'report_errors': {'key': 'reportErrors', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        *,
+        values: Optional[List["IMetricV2"]] = None,
+        report_errors: Optional[bool] = None,
+        **kwargs
+    ):
+        """
+        :keyword values:
+        :paramtype values: list[~azure.mgmt.machinelearningservices.models.IMetricV2]
+        :keyword report_errors:
+        :paramtype report_errors: bool
+        """
+        super(BatchIMetricV2, self).__init__(**kwargs)
+        self.values = values
+        self.report_errors = report_errors
+
+
+class BatchRequest1(msrest.serialization.Model):
+    """BatchRequest1.
+
+    :ivar requests: Dictionary of :code:`<GetRunDataRequest>`.
+    :vartype requests: dict[str, ~azure.mgmt.machinelearningservices.models.GetRunDataRequest]
+    """
+
+    _attribute_map = {
+        'requests': {'key': 'requests', 'type': '{GetRunDataRequest}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        requests: Optional[Dict[str, "GetRunDataRequest"]] = None,
+        **kwargs
+    ):
+        """
+        :keyword requests: Dictionary of :code:`<GetRunDataRequest>`.
+        :paramtype requests: dict[str, ~azure.mgmt.machinelearningservices.models.GetRunDataRequest]
+        """
+        super(BatchRequest1, self).__init__(**kwargs)
+        self.requests = requests
+
+
+class BatchResult1(msrest.serialization.Model):
+    """BatchResult1.
+
+    :ivar successful_results: Dictionary of :code:`<GetRunDataResult>`.
+    :vartype successful_results: dict[str,
+     ~azure.mgmt.machinelearningservices.models.GetRunDataResult]
+    :ivar failed_results: Dictionary of :code:`<ErrorResponse>`.
+    :vartype failed_results: dict[str, ~azure.mgmt.machinelearningservices.models.ErrorResponse]
+    """
+
+    _attribute_map = {
+        'successful_results': {'key': 'successfulResults', 'type': '{GetRunDataResult}'},
+        'failed_results': {'key': 'failedResults', 'type': '{ErrorResponse}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        successful_results: Optional[Dict[str, "GetRunDataResult"]] = None,
+        failed_results: Optional[Dict[str, "ErrorResponse"]] = None,
+        **kwargs
+    ):
+        """
+        :keyword successful_results: Dictionary of :code:`<GetRunDataResult>`.
+        :paramtype successful_results: dict[str,
+         ~azure.mgmt.machinelearningservices.models.GetRunDataResult]
+        :keyword failed_results: Dictionary of :code:`<ErrorResponse>`.
+        :paramtype failed_results: dict[str, ~azure.mgmt.machinelearningservices.models.ErrorResponse]
+        """
+        super(BatchResult1, self).__init__(**kwargs)
+        self.successful_results = successful_results
+        self.failed_results = failed_results
+
+
+class BatchRunResult(msrest.serialization.Model):
+    """BatchRunResult.
+
+    :ivar runs: Dictionary of :code:`<Run>`.
+    :vartype runs: dict[str, ~azure.mgmt.machinelearningservices.models.Run]
+    :ivar errors: Dictionary of :code:`<ErrorResponse>`.
+    :vartype errors: dict[str, ~azure.mgmt.machinelearningservices.models.ErrorResponse]
+    """
+
+    _attribute_map = {
+        'runs': {'key': 'runs', 'type': '{Run}'},
+        'errors': {'key': 'errors', 'type': '{ErrorResponse}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        runs: Optional[Dict[str, "Run"]] = None,
+        errors: Optional[Dict[str, "ErrorResponse"]] = None,
+        **kwargs
+    ):
+        """
+        :keyword runs: Dictionary of :code:`<Run>`.
+        :paramtype runs: dict[str, ~azure.mgmt.machinelearningservices.models.Run]
+        :keyword errors: Dictionary of :code:`<ErrorResponse>`.
+        :paramtype errors: dict[str, ~azure.mgmt.machinelearningservices.models.ErrorResponse]
+        """
+        super(BatchRunResult, self).__init__(**kwargs)
+        self.runs = runs
+        self.errors = errors
+
+
+class Compute(msrest.serialization.Model):
+    """Compute.
+
+    :ivar target:
+    :vartype target: str
+    :ivar target_type:
+    :vartype target_type: str
+    :ivar vm_size:
+    :vartype vm_size: str
+    :ivar instance_count:
+    :vartype instance_count: int
+    :ivar gpu_count:
+    :vartype gpu_count: int
+    :ivar priority:
+    :vartype priority: str
+    :ivar region:
+    :vartype region: str
+    """
+
+    _attribute_map = {
+        'target': {'key': 'target', 'type': 'str'},
+        'target_type': {'key': 'targetType', 'type': 'str'},
+        'vm_size': {'key': 'vmSize', 'type': 'str'},
+        'instance_count': {'key': 'instanceCount', 'type': 'int'},
+        'gpu_count': {'key': 'gpuCount', 'type': 'int'},
+        'priority': {'key': 'priority', 'type': 'str'},
+        'region': {'key': 'region', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        target: Optional[str] = None,
+        target_type: Optional[str] = None,
+        vm_size: Optional[str] = None,
+        instance_count: Optional[int] = None,
+        gpu_count: Optional[int] = None,
+        priority: Optional[str] = None,
+        region: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword target:
+        :paramtype target: str
+        :keyword target_type:
+        :paramtype target_type: str
+        :keyword vm_size:
+        :paramtype vm_size: str
+        :keyword instance_count:
+        :paramtype instance_count: int
+        :keyword gpu_count:
+        :paramtype gpu_count: int
+        :keyword priority:
+        :paramtype priority: str
+        :keyword region:
+        :paramtype region: str
+        """
+        super(Compute, self).__init__(**kwargs)
+        self.target = target
+        self.target_type = target_type
+        self.vm_size = vm_size
+        self.instance_count = instance_count
+        self.gpu_count = gpu_count
+        self.priority = priority
+        self.region = region
+
+
+class ComputeRequest(msrest.serialization.Model):
+    """ComputeRequest.
+
+    :ivar node_count:
+    :vartype node_count: int
+    :ivar gpu_count:
+    :vartype gpu_count: int
+    """
+
+    _attribute_map = {
+        'node_count': {'key': 'nodeCount', 'type': 'int'},
+        'gpu_count': {'key': 'gpuCount', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        *,
+        node_count: Optional[int] = None,
+        gpu_count: Optional[int] = None,
+        **kwargs
+    ):
+        """
+        :keyword node_count:
+        :paramtype node_count: int
+        :keyword gpu_count:
+        :paramtype gpu_count: int
+        """
+        super(ComputeRequest, self).__init__(**kwargs)
+        self.node_count = node_count
+        self.gpu_count = gpu_count
+
+
+class CreatedFrom(msrest.serialization.Model):
+    """CreatedFrom.
+
+    :ivar type:  The only acceptable values to pass in are None and "Notebook". The default value
+     is None.
+    :vartype type: str
+    :ivar location_type:  The only acceptable values to pass in are None and "ArtifactId". The
+     default value is None.
+    :vartype location_type: str
+    :ivar location:
+    :vartype location: str
+    """
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'location_type': {'key': 'locationType', 'type': 'str'},
+        'location': {'key': 'location', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        type: Optional[str] = None,
+        location_type: Optional[str] = None,
+        location: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword type:  The only acceptable values to pass in are None and "Notebook". The default
+         value is None.
+        :paramtype type: str
+        :keyword location_type:  The only acceptable values to pass in are None and "ArtifactId". The
+         default value is None.
+        :paramtype location_type: str
+        :keyword location:
+        :paramtype location: str
+        """
+        super(CreatedFrom, self).__init__(**kwargs)
+        self.type = type
+        self.location_type = location_type
+        self.location = location
+
+
+class CreateRun(msrest.serialization.Model):
+    """CreateRun.
+
+    :ivar run_id: The identifier for the run. Run IDs must be less than 256 characters and contain
+     only alphanumeric characters with dashes and underscores.
+    :vartype run_id: str
+    :ivar parent_run_id: The parent of the run if the run is hierarchical; otherwise, Null.
+    :vartype parent_run_id: str
+    :ivar experiment_id: The Id of the experiment that created this run.
+    :vartype experiment_id: str
+    :ivar status: The status of the run. The Status string value maps to the RunStatus Enum.
+    :vartype status: str
+    :ivar start_time_utc: The start time of the run in UTC.
+    :vartype start_time_utc: ~datetime.datetime
+    :ivar end_time_utc: The end time of the run in UTC.
+    :vartype end_time_utc: ~datetime.datetime
+    :ivar options:
+    :vartype options: ~azure.mgmt.machinelearningservices.models.RunOptions
+    :ivar is_virtual: A virtual run can set an active child run that will override the virtual run
+     status and properties.
+    :vartype is_virtual: bool
+    :ivar display_name:
+    :vartype display_name: str
+    :ivar name:
+    :vartype name: str
+    :ivar data_container_id:
+    :vartype data_container_id: str
+    :ivar description:
+    :vartype description: str
+    :ivar hidden:
+    :vartype hidden: bool
+    :ivar run_type:
+    :vartype run_type: str
+    :ivar run_type_v2:
+    :vartype run_type_v2: ~azure.mgmt.machinelearningservices.models.RunTypeV2
+    :ivar properties: Dictionary of :code:`<string>`.
+    :vartype properties: dict[str, str]
+    :ivar parameters: Dictionary of :code:`<any>`.
+    :vartype parameters: dict[str, any]
+    :ivar action_uris: Dictionary of :code:`<string>`.
+    :vartype action_uris: dict[str, str]
+    :ivar script_name:
+    :vartype script_name: str
+    :ivar target:
+    :vartype target: str
+    :ivar unique_child_run_compute_targets:
+    :vartype unique_child_run_compute_targets: list[str]
+    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
+    :vartype tags: dict[str, str]
+    :ivar settings: Dictionary of :code:`<string>`.
+    :vartype settings: dict[str, str]
+    :ivar services: Dictionary of :code:`<EndpointSetting>`.
+    :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.EndpointSetting]
+    :ivar input_datasets:
+    :vartype input_datasets: list[~azure.mgmt.machinelearningservices.models.DatasetLineage]
+    :ivar output_datasets:
+    :vartype output_datasets: list[~azure.mgmt.machinelearningservices.models.OutputDatasetLineage]
+    :ivar run_definition: Anything.
+    :vartype run_definition: any
+    :ivar job_specification: Anything.
+    :vartype job_specification: any
+    :ivar primary_metric_name:
+    :vartype primary_metric_name: str
+    :ivar created_from:
+    :vartype created_from: ~azure.mgmt.machinelearningservices.models.CreatedFrom
+    :ivar cancel_uri:
+    :vartype cancel_uri: str
+    :ivar complete_uri:
+    :vartype complete_uri: str
+    :ivar diagnostics_uri:
+    :vartype diagnostics_uri: str
+    :ivar compute_request:
+    :vartype compute_request: ~azure.mgmt.machinelearningservices.models.ComputeRequest
+    :ivar compute:
+    :vartype compute: ~azure.mgmt.machinelearningservices.models.Compute
+    :ivar retain_for_lifetime_of_workspace:
+    :vartype retain_for_lifetime_of_workspace: bool
+    :ivar queueing_info:
+    :vartype queueing_info: ~azure.mgmt.machinelearningservices.models.QueueingInfo
+    :ivar active_child_run_id: The RunId of the active child on a virtual run.
+    :vartype active_child_run_id: str
+    :ivar inputs: Dictionary of :code:`<TypedAssetReference>`.
+    :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+    :ivar outputs: Dictionary of :code:`<TypedAssetReference>`.
+    :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+    """
+
+    _validation = {
+        'unique_child_run_compute_targets': {'unique': True},
+        'input_datasets': {'unique': True},
+        'output_datasets': {'unique': True},
+    }
+
+    _attribute_map = {
+        'run_id': {'key': 'runId', 'type': 'str'},
+        'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
+        'experiment_id': {'key': 'experimentId', 'type': 'str'},
+        'status': {'key': 'status', 'type': 'str'},
+        'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
+        'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
+        'options': {'key': 'options', 'type': 'RunOptions'},
+        'is_virtual': {'key': 'isVirtual', 'type': 'bool'},
+        'display_name': {'key': 'displayName', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'hidden': {'key': 'hidden', 'type': 'bool'},
+        'run_type': {'key': 'runType', 'type': 'str'},
+        'run_type_v2': {'key': 'runTypeV2', 'type': 'RunTypeV2'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'parameters': {'key': 'parameters', 'type': '{object}'},
+        'action_uris': {'key': 'actionUris', 'type': '{str}'},
+        'script_name': {'key': 'scriptName', 'type': 'str'},
+        'target': {'key': 'target', 'type': 'str'},
+        'unique_child_run_compute_targets': {'key': 'uniqueChildRunComputeTargets', 'type': '[str]'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'settings': {'key': 'settings', 'type': '{str}'},
+        'services': {'key': 'services', 'type': '{EndpointSetting}'},
+        'input_datasets': {'key': 'inputDatasets', 'type': '[DatasetLineage]'},
+        'output_datasets': {'key': 'outputDatasets', 'type': '[OutputDatasetLineage]'},
+        'run_definition': {'key': 'runDefinition', 'type': 'object'},
+        'job_specification': {'key': 'jobSpecification', 'type': 'object'},
+        'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
+        'created_from': {'key': 'createdFrom', 'type': 'CreatedFrom'},
+        'cancel_uri': {'key': 'cancelUri', 'type': 'str'},
+        'complete_uri': {'key': 'completeUri', 'type': 'str'},
+        'diagnostics_uri': {'key': 'diagnosticsUri', 'type': 'str'},
+        'compute_request': {'key': 'computeRequest', 'type': 'ComputeRequest'},
+        'compute': {'key': 'compute', 'type': 'Compute'},
+        'retain_for_lifetime_of_workspace': {'key': 'retainForLifetimeOfWorkspace', 'type': 'bool'},
+        'queueing_info': {'key': 'queueingInfo', 'type': 'QueueingInfo'},
+        'active_child_run_id': {'key': 'activeChildRunId', 'type': 'str'},
+        'inputs': {'key': 'inputs', 'type': '{TypedAssetReference}'},
+        'outputs': {'key': 'outputs', 'type': '{TypedAssetReference}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        run_id: Optional[str] = None,
+        parent_run_id: Optional[str] = None,
+        experiment_id: Optional[str] = None,
+        status: Optional[str] = None,
+        start_time_utc: Optional[datetime.datetime] = None,
+        end_time_utc: Optional[datetime.datetime] = None,
+        options: Optional["RunOptions"] = None,
+        is_virtual: Optional[bool] = None,
+        display_name: Optional[str] = None,
+        name: Optional[str] = None,
+        data_container_id: Optional[str] = None,
+        description: Optional[str] = None,
+        hidden: Optional[bool] = None,
+        run_type: Optional[str] = None,
+        run_type_v2: Optional["RunTypeV2"] = None,
+        properties: Optional[Dict[str, str]] = None,
+        parameters: Optional[Dict[str, Any]] = None,
+        action_uris: Optional[Dict[str, str]] = None,
+        script_name: Optional[str] = None,
+        target: Optional[str] = None,
+        unique_child_run_compute_targets: Optional[List[str]] = None,
+        tags: Optional[Dict[str, str]] = None,
+        settings: Optional[Dict[str, str]] = None,
+        services: Optional[Dict[str, "EndpointSetting"]] = None,
+        input_datasets: Optional[List["DatasetLineage"]] = None,
+        output_datasets: Optional[List["OutputDatasetLineage"]] = None,
+        run_definition: Optional[Any] = None,
+        job_specification: Optional[Any] = None,
+        primary_metric_name: Optional[str] = None,
+        created_from: Optional["CreatedFrom"] = None,
+        cancel_uri: Optional[str] = None,
+        complete_uri: Optional[str] = None,
+        diagnostics_uri: Optional[str] = None,
+        compute_request: Optional["ComputeRequest"] = None,
+        compute: Optional["Compute"] = None,
+        retain_for_lifetime_of_workspace: Optional[bool] = None,
+        queueing_info: Optional["QueueingInfo"] = None,
+        active_child_run_id: Optional[str] = None,
+        inputs: Optional[Dict[str, "TypedAssetReference"]] = None,
+        outputs: Optional[Dict[str, "TypedAssetReference"]] = None,
+        **kwargs
+    ):
+        """
+        :keyword run_id: The identifier for the run. Run IDs must be less than 256 characters and
+         contain only alphanumeric characters with dashes and underscores.
+        :paramtype run_id: str
+        :keyword parent_run_id: The parent of the run if the run is hierarchical; otherwise, Null.
+        :paramtype parent_run_id: str
+        :keyword experiment_id: The Id of the experiment that created this run.
+        :paramtype experiment_id: str
+        :keyword status: The status of the run. The Status string value maps to the RunStatus Enum.
+        :paramtype status: str
+        :keyword start_time_utc: The start time of the run in UTC.
+        :paramtype start_time_utc: ~datetime.datetime
+        :keyword end_time_utc: The end time of the run in UTC.
+        :paramtype end_time_utc: ~datetime.datetime
+        :keyword options:
+        :paramtype options: ~azure.mgmt.machinelearningservices.models.RunOptions
+        :keyword is_virtual: A virtual run can set an active child run that will override the virtual
+         run status and properties.
+        :paramtype is_virtual: bool
+        :keyword display_name:
+        :paramtype display_name: str
+        :keyword name:
+        :paramtype name: str
+        :keyword data_container_id:
+        :paramtype data_container_id: str
+        :keyword description:
+        :paramtype description: str
+        :keyword hidden:
+        :paramtype hidden: bool
+        :keyword run_type:
+        :paramtype run_type: str
+        :keyword run_type_v2:
+        :paramtype run_type_v2: ~azure.mgmt.machinelearningservices.models.RunTypeV2
+        :keyword properties: Dictionary of :code:`<string>`.
+        :paramtype properties: dict[str, str]
+        :keyword parameters: Dictionary of :code:`<any>`.
+        :paramtype parameters: dict[str, any]
+        :keyword action_uris: Dictionary of :code:`<string>`.
+        :paramtype action_uris: dict[str, str]
+        :keyword script_name:
+        :paramtype script_name: str
+        :keyword target:
+        :paramtype target: str
+        :keyword unique_child_run_compute_targets:
+        :paramtype unique_child_run_compute_targets: list[str]
+        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
+        :paramtype tags: dict[str, str]
+        :keyword settings: Dictionary of :code:`<string>`.
+        :paramtype settings: dict[str, str]
+        :keyword services: Dictionary of :code:`<EndpointSetting>`.
+        :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.EndpointSetting]
+        :keyword input_datasets:
+        :paramtype input_datasets: list[~azure.mgmt.machinelearningservices.models.DatasetLineage]
+        :keyword output_datasets:
+        :paramtype output_datasets:
+         list[~azure.mgmt.machinelearningservices.models.OutputDatasetLineage]
+        :keyword run_definition: Anything.
+        :paramtype run_definition: any
+        :keyword job_specification: Anything.
+        :paramtype job_specification: any
+        :keyword primary_metric_name:
+        :paramtype primary_metric_name: str
+        :keyword created_from:
+        :paramtype created_from: ~azure.mgmt.machinelearningservices.models.CreatedFrom
+        :keyword cancel_uri:
+        :paramtype cancel_uri: str
+        :keyword complete_uri:
+        :paramtype complete_uri: str
+        :keyword diagnostics_uri:
+        :paramtype diagnostics_uri: str
+        :keyword compute_request:
+        :paramtype compute_request: ~azure.mgmt.machinelearningservices.models.ComputeRequest
+        :keyword compute:
+        :paramtype compute: ~azure.mgmt.machinelearningservices.models.Compute
+        :keyword retain_for_lifetime_of_workspace:
+        :paramtype retain_for_lifetime_of_workspace: bool
+        :keyword queueing_info:
+        :paramtype queueing_info: ~azure.mgmt.machinelearningservices.models.QueueingInfo
+        :keyword active_child_run_id: The RunId of the active child on a virtual run.
+        :paramtype active_child_run_id: str
+        :keyword inputs: Dictionary of :code:`<TypedAssetReference>`.
+        :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+        :keyword outputs: Dictionary of :code:`<TypedAssetReference>`.
+        :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+        """
+        super(CreateRun, self).__init__(**kwargs)
+        self.run_id = run_id
+        self.parent_run_id = parent_run_id
+        self.experiment_id = experiment_id
+        self.status = status
+        self.start_time_utc = start_time_utc
+        self.end_time_utc = end_time_utc
+        self.options = options
+        self.is_virtual = is_virtual
+        self.display_name = display_name
+        self.name = name
+        self.data_container_id = data_container_id
+        self.description = description
+        self.hidden = hidden
+        self.run_type = run_type
+        self.run_type_v2 = run_type_v2
+        self.properties = properties
+        self.parameters = parameters
+        self.action_uris = action_uris
+        self.script_name = script_name
+        self.target = target
+        self.unique_child_run_compute_targets = unique_child_run_compute_targets
+        self.tags = tags
+        self.settings = settings
+        self.services = services
+        self.input_datasets = input_datasets
+        self.output_datasets = output_datasets
+        self.run_definition = run_definition
+        self.job_specification = job_specification
+        self.primary_metric_name = primary_metric_name
+        self.created_from = created_from
+        self.cancel_uri = cancel_uri
+        self.complete_uri = complete_uri
+        self.diagnostics_uri = diagnostics_uri
+        self.compute_request = compute_request
+        self.compute = compute
+        self.retain_for_lifetime_of_workspace = retain_for_lifetime_of_workspace
+        self.queueing_info = queueing_info
+        self.active_child_run_id = active_child_run_id
+        self.inputs = inputs
+        self.outputs = outputs
+
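As a hedged illustration of how the request models above compose, a CreateRun can be wrapped in the BatchAddOrModifyRunRequest defined earlier in this file (ids and names here are placeholders):

    from azure.ai.ml._restclient.runhistory.models import (
        BatchAddOrModifyRunRequest,
        CreateRun,
    )

    run = CreateRun(
        run_id="example_run_001",
        status="NotStarted",
        display_name="example run",
        tags={"source": "sdk-sketch"},
    )
    body = BatchAddOrModifyRunRequest(runs=[run]).serialize()
    # body['runs'][0]['runId'] == 'example_run_001', per the attribute maps above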
+
+class DatasetIdentifier(msrest.serialization.Model):
+    """DatasetIdentifier.
+
+    :ivar saved_id:
+    :vartype saved_id: str
+    :ivar registered_id:
+    :vartype registered_id: str
+    :ivar registered_version:
+    :vartype registered_version: str
+    """
+
+    _attribute_map = {
+        'saved_id': {'key': 'savedId', 'type': 'str'},
+        'registered_id': {'key': 'registeredId', 'type': 'str'},
+        'registered_version': {'key': 'registeredVersion', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        saved_id: Optional[str] = None,
+        registered_id: Optional[str] = None,
+        registered_version: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword saved_id:
+        :paramtype saved_id: str
+        :keyword registered_id:
+        :paramtype registered_id: str
+        :keyword registered_version:
+        :paramtype registered_version: str
+        """
+        super(DatasetIdentifier, self).__init__(**kwargs)
+        self.saved_id = saved_id
+        self.registered_id = registered_id
+        self.registered_version = registered_version
+
+
+class DatasetInputDetails(msrest.serialization.Model):
+    """DatasetInputDetails.
+
+    :ivar input_name:
+    :vartype input_name: str
+    :ivar mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
+    :vartype mechanism: str or ~azure.mgmt.machinelearningservices.models.DatasetDeliveryMechanism
+    :ivar path_on_compute:
+    :vartype path_on_compute: str
+    """
+
+    _attribute_map = {
+        'input_name': {'key': 'inputName', 'type': 'str'},
+        'mechanism': {'key': 'mechanism', 'type': 'str'},
+        'path_on_compute': {'key': 'pathOnCompute', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        input_name: Optional[str] = None,
+        mechanism: Optional[Union[str, "DatasetDeliveryMechanism"]] = None,
+        path_on_compute: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword input_name:
+        :paramtype input_name: str
+        :keyword mechanism: Possible values include: "Direct", "Mount", "Download", "Hdfs".
+        :paramtype mechanism: str or
+         ~azure.mgmt.machinelearningservices.models.DatasetDeliveryMechanism
+        :keyword path_on_compute:
+        :paramtype path_on_compute: str
+        """
+        super(DatasetInputDetails, self).__init__(**kwargs)
+        self.input_name = input_name
+        self.mechanism = mechanism
+        self.path_on_compute = path_on_compute
+
+
+class DatasetLineage(msrest.serialization.Model):
+    """DatasetLineage.
+
+    :ivar identifier:
+    :vartype identifier: ~azure.mgmt.machinelearningservices.models.DatasetIdentifier
+    :ivar consumption_type: Possible values include: "RunInput", "Reference".
+    :vartype consumption_type: str or
+     ~azure.mgmt.machinelearningservices.models.DatasetConsumptionType
+    :ivar input_details:
+    :vartype input_details: ~azure.mgmt.machinelearningservices.models.DatasetInputDetails
+    """
+
+    _attribute_map = {
+        'identifier': {'key': 'identifier', 'type': 'DatasetIdentifier'},
+        'consumption_type': {'key': 'consumptionType', 'type': 'str'},
+        'input_details': {'key': 'inputDetails', 'type': 'DatasetInputDetails'},
+    }
+
+    def __init__(
+        self,
+        *,
+        identifier: Optional["DatasetIdentifier"] = None,
+        consumption_type: Optional[Union[str, "DatasetConsumptionType"]] = None,
+        input_details: Optional["DatasetInputDetails"] = None,
+        **kwargs
+    ):
+        """
+        :keyword identifier:
+        :paramtype identifier: ~azure.mgmt.machinelearningservices.models.DatasetIdentifier
+        :keyword consumption_type: Possible values include: "RunInput", "Reference".
+        :paramtype consumption_type: str or
+         ~azure.mgmt.machinelearningservices.models.DatasetConsumptionType
+        :keyword input_details:
+        :paramtype input_details: ~azure.mgmt.machinelearningservices.models.DatasetInputDetails
+        """
+        super(DatasetLineage, self).__init__(**kwargs)
+        self.identifier = identifier
+        self.consumption_type = consumption_type
+        self.input_details = input_details
+
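+# Illustrative sketch (not part of the generated models): how the dataset lineage
+# classes above compose. The ids, names, and version below are placeholders.
+_example_input_lineage = DatasetLineage(
+    identifier=DatasetIdentifier(saved_id="example-saved-id", registered_version="1"),
+    consumption_type="RunInput",
+    input_details=DatasetInputDetails(input_name="training_data", mechanism="Mount"),
+)
+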
+
+class DatasetOutputDetails(msrest.serialization.Model):
+    """DatasetOutputDetails.
+
+    :ivar output_name:
+    :vartype output_name: str
+    """
+
+    _attribute_map = {
+        'output_name': {'key': 'outputName', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        output_name: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword output_name:
+        :paramtype output_name: str
+        """
+        super(DatasetOutputDetails, self).__init__(**kwargs)
+        self.output_name = output_name
+
+
+class DeleteConfiguration(msrest.serialization.Model):
+    """DeleteConfiguration.
+
+    :ivar workspace_id:
+    :vartype workspace_id: str
+    :ivar is_enabled:
+    :vartype is_enabled: bool
+    :ivar cutoff_days:
+    :vartype cutoff_days: int
+    """
+
+    _attribute_map = {
+        'workspace_id': {'key': 'workspaceId', 'type': 'str'},
+        'is_enabled': {'key': 'isEnabled', 'type': 'bool'},
+        'cutoff_days': {'key': 'cutoffDays', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        *,
+        workspace_id: Optional[str] = None,
+        is_enabled: Optional[bool] = None,
+        cutoff_days: Optional[int] = None,
+        **kwargs
+    ):
+        """
+        :keyword workspace_id:
+        :paramtype workspace_id: str
+        :keyword is_enabled:
+        :paramtype is_enabled: bool
+        :keyword cutoff_days:
+        :paramtype cutoff_days: int
+        """
+        super(DeleteConfiguration, self).__init__(**kwargs)
+        self.workspace_id = workspace_id
+        self.is_enabled = is_enabled
+        self.cutoff_days = cutoff_days
+
+
+class DeleteExperimentTagsResult(msrest.serialization.Model):
+    """DeleteExperimentTagsResult.
+
+    :ivar errors: Dictionary of :code:`<ErrorResponse>`.
+    :vartype errors: dict[str, ~azure.mgmt.machinelearningservices.models.ErrorResponse]
+    """
+
+    _attribute_map = {
+        'errors': {'key': 'errors', 'type': '{ErrorResponse}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        errors: Optional[Dict[str, "ErrorResponse"]] = None,
+        **kwargs
+    ):
+        """
+        :keyword errors: Dictionary of :code:`<ErrorResponse>`.
+        :paramtype errors: dict[str, ~azure.mgmt.machinelearningservices.models.ErrorResponse]
+        """
+        super(DeleteExperimentTagsResult, self).__init__(**kwargs)
+        self.errors = errors
+
+
+class DeleteOrModifyTags(msrest.serialization.Model):
+    """The Tags to modify or delete.
+
+    :ivar tags_to_modify: The KV pairs of tags to modify.
+    :vartype tags_to_modify: dict[str, str]
+    :ivar tags_to_delete: The list of tags to delete.
+    :vartype tags_to_delete: list[str]
+    """
+
+    _attribute_map = {
+        'tags_to_modify': {'key': 'tagsToModify', 'type': '{str}'},
+        'tags_to_delete': {'key': 'tagsToDelete', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        tags_to_modify: Optional[Dict[str, str]] = None,
+        tags_to_delete: Optional[List[str]] = None,
+        **kwargs
+    ):
+        """
+        :keyword tags_to_modify: The KV pairs of tags to modify.
+        :paramtype tags_to_modify: dict[str, str]
+        :keyword tags_to_delete: The list of tags to delete.
+        :paramtype tags_to_delete: list[str]
+        """
+        super(DeleteOrModifyTags, self).__init__(**kwargs)
+        self.tags_to_modify = tags_to_modify
+        self.tags_to_delete = tags_to_delete
+
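+# Illustrative sketch (not part of the generated models): a single DeleteOrModifyTags
+# payload can both upsert and remove tags; the tag names and values are placeholders.
+_example_tag_update = DeleteOrModifyTags(
+    tags_to_modify={"stage": "staging", "owner": "team-a"},
+    tags_to_delete=["deprecated"],
+)
+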
+
+class DeleteRunServices(msrest.serialization.Model):
+    """The Services to delete.
+
+    :ivar services_to_delete: The list of Services to delete.
+    :vartype services_to_delete: list[str]
+    """
+
+    _attribute_map = {
+        'services_to_delete': {'key': 'servicesToDelete', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        services_to_delete: Optional[List[str]] = None,
+        **kwargs
+    ):
+        """
+        :keyword services_to_delete: The list of Services to delete.
+        :paramtype services_to_delete: list[str]
+        """
+        super(DeleteRunServices, self).__init__(**kwargs)
+        self.services_to_delete = services_to_delete
+
+
+class DeleteTagsCommand(msrest.serialization.Model):
+    """DeleteTagsCommand.
+
+    :ivar tags: A set of tags.
+    :vartype tags: list[str]
+    """
+
+    _attribute_map = {
+        'tags': {'key': 'tags', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        tags: Optional[List[str]] = None,
+        **kwargs
+    ):
+        """
+        :keyword tags: A set of tags.
+        :paramtype tags: list[str]
+        """
+        super(DeleteTagsCommand, self).__init__(**kwargs)
+        self.tags = tags
+
+
+class DerivedMetricKey(msrest.serialization.Model):
+    """DerivedMetricKey.
+
+    :ivar namespace:
+    :vartype namespace: str
+    :ivar name:
+    :vartype name: str
+    :ivar labels:
+    :vartype labels: list[str]
+    :ivar column_names:
+    :vartype column_names: list[str]
+    """
+
+    _validation = {
+        'labels': {'unique': True},
+        'column_names': {'unique': True},
+    }
+
+    _attribute_map = {
+        'namespace': {'key': 'namespace', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'labels': {'key': 'labels', 'type': '[str]'},
+        'column_names': {'key': 'columnNames', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        namespace: Optional[str] = None,
+        name: Optional[str] = None,
+        labels: Optional[List[str]] = None,
+        column_names: Optional[List[str]] = None,
+        **kwargs
+    ):
+        """
+        :keyword namespace:
+        :paramtype namespace: str
+        :keyword name:
+        :paramtype name: str
+        :keyword labels:
+        :paramtype labels: list[str]
+        :keyword column_names:
+        :paramtype column_names: list[str]
+        """
+        super(DerivedMetricKey, self).__init__(**kwargs)
+        self.namespace = namespace
+        self.name = name
+        self.labels = labels
+        self.column_names = column_names
+
+
+class EndpointSetting(msrest.serialization.Model):
+    """EndpointSetting.
+
+    :ivar type:
+    :vartype type: str
+    :ivar port:
+    :vartype port: int
+    :ivar ssl_thumbprint:
+    :vartype ssl_thumbprint: str
+    :ivar endpoint:
+    :vartype endpoint: str
+    :ivar proxy_endpoint:
+    :vartype proxy_endpoint: str
+    :ivar status:
+    :vartype status: str
+    :ivar error_message:
+    :vartype error_message: str
+    :ivar enabled:
+    :vartype enabled: bool
+    :ivar properties: Dictionary of :code:`<string>`.
+    :vartype properties: dict[str, str]
+    """
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'port': {'key': 'port', 'type': 'int'},
+        'ssl_thumbprint': {'key': 'sslThumbprint', 'type': 'str'},
+        'endpoint': {'key': 'endpoint', 'type': 'str'},
+        'proxy_endpoint': {'key': 'proxyEndpoint', 'type': 'str'},
+        'status': {'key': 'status', 'type': 'str'},
+        'error_message': {'key': 'errorMessage', 'type': 'str'},
+        'enabled': {'key': 'enabled', 'type': 'bool'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        type: Optional[str] = None,
+        port: Optional[int] = None,
+        ssl_thumbprint: Optional[str] = None,
+        endpoint: Optional[str] = None,
+        proxy_endpoint: Optional[str] = None,
+        status: Optional[str] = None,
+        error_message: Optional[str] = None,
+        enabled: Optional[bool] = None,
+        properties: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        """
+        :keyword type:
+        :paramtype type: str
+        :keyword port:
+        :paramtype port: int
+        :keyword ssl_thumbprint:
+        :paramtype ssl_thumbprint: str
+        :keyword endpoint:
+        :paramtype endpoint: str
+        :keyword proxy_endpoint:
+        :paramtype proxy_endpoint: str
+        :keyword status:
+        :paramtype status: str
+        :keyword error_message:
+        :paramtype error_message: str
+        :keyword enabled:
+        :paramtype enabled: bool
+        :keyword properties: Dictionary of :code:`<string>`.
+        :paramtype properties: dict[str, str]
+        """
+        super(EndpointSetting, self).__init__(**kwargs)
+        self.type = type
+        self.port = port
+        self.ssl_thumbprint = ssl_thumbprint
+        self.endpoint = endpoint
+        self.proxy_endpoint = proxy_endpoint
+        self.status = status
+        self.error_message = error_message
+        self.enabled = enabled
+        self.properties = properties
+
+
+class ErrorAdditionalInfo(msrest.serialization.Model):
+    """The resource management error additional info.
+
+    :ivar type: The additional info type.
+    :vartype type: str
+    :ivar info: The additional info.
+    :vartype info: any
+    """
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'info': {'key': 'info', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        *,
+        type: Optional[str] = None,
+        info: Optional[Any] = None,
+        **kwargs
+    ):
+        """
+        :keyword type: The additional info type.
+        :paramtype type: str
+        :keyword info: The additional info.
+        :paramtype info: any
+        """
+        super(ErrorAdditionalInfo, self).__init__(**kwargs)
+        self.type = type
+        self.info = info
+
+
+class ErrorResponse(msrest.serialization.Model):
+    """The error response.
+
+    :ivar error: The root error.
+    :vartype error: ~azure.mgmt.machinelearningservices.models.RootError
+    :ivar correlation: Dictionary containing correlation details for the error.
+    :vartype correlation: dict[str, str]
+    :ivar environment: The hosting environment.
+    :vartype environment: str
+    :ivar location: The Azure region.
+    :vartype location: str
+    :ivar time: The time in UTC.
+    :vartype time: ~datetime.datetime
+    :ivar component_name: Component name where the error originated or was encountered.
+    :vartype component_name: str
+    """
+
+    _attribute_map = {
+        'error': {'key': 'error', 'type': 'RootError'},
+        'correlation': {'key': 'correlation', 'type': '{str}'},
+        'environment': {'key': 'environment', 'type': 'str'},
+        'location': {'key': 'location', 'type': 'str'},
+        'time': {'key': 'time', 'type': 'iso-8601'},
+        'component_name': {'key': 'componentName', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        error: Optional["RootError"] = None,
+        correlation: Optional[Dict[str, str]] = None,
+        environment: Optional[str] = None,
+        location: Optional[str] = None,
+        time: Optional[datetime.datetime] = None,
+        component_name: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword error: The root error.
+        :paramtype error: ~azure.mgmt.machinelearningservices.models.RootError
+        :keyword correlation: Dictionary containing correlation details for the error.
+        :paramtype correlation: dict[str, str]
+        :keyword environment: The hosting environment.
+        :paramtype environment: str
+        :keyword location: The Azure region.
+        :paramtype location: str
+        :keyword time: The time in UTC.
+        :paramtype time: ~datetime.datetime
+        :keyword component_name: Component name where the error originated or was encountered.
+        :paramtype component_name: str
+        """
+        super(ErrorResponse, self).__init__(**kwargs)
+        self.error = error
+        self.correlation = correlation
+        self.environment = environment
+        self.location = location
+        self.time = time
+        self.component_name = component_name
+
+
+class Event(msrest.serialization.Model):
+    """Event.
+
+    :ivar name: Gets the Microsoft.MachineLearning.RunHistory.Contracts.Event name.
+    :vartype name: str
+    :ivar timestamp: Gets the Microsoft.MachineLearning.RunHistory.Contracts.Event timestamp.
+    :vartype timestamp: ~datetime.datetime
+    :ivar attributes: Gets the System.Collections.Generic.IDictionary`2 collection of attributes
+     associated with the event.
+    :vartype attributes: dict[str, any]
+    """
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
+        'attributes': {'key': 'attributes', 'type': '{object}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        name: Optional[str] = None,
+        timestamp: Optional[datetime.datetime] = None,
+        attributes: Optional[Dict[str, Any]] = None,
+        **kwargs
+    ):
+        """
+        :keyword name: Gets the Microsoft.MachineLearning.RunHistory.Contracts.Event name.
+        :paramtype name: str
+        :keyword timestamp: Gets the Microsoft.MachineLearning.RunHistory.Contracts.Event timestamp.
+        :paramtype timestamp: ~datetime.datetime
+        :keyword attributes: Gets the System.Collections.Generic.IDictionary`2 collection of attributes
+         associated with the event.
+        :paramtype attributes: dict[str, any]
+        """
+        super(Event, self).__init__(**kwargs)
+        self.name = name
+        self.timestamp = timestamp
+        self.attributes = attributes
+
+
+class Experiment(msrest.serialization.Model):
+    """Experiment.
+
+    :ivar experiment_id:
+    :vartype experiment_id: str
+    :ivar name:
+    :vartype name: str
+    :ivar description:
+    :vartype description: str
+    :ivar created_utc:
+    :vartype created_utc: ~datetime.datetime
+    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
+    :vartype tags: dict[str, str]
+    :ivar archived_time:
+    :vartype archived_time: ~datetime.datetime
+    :ivar retain_for_lifetime_of_workspace:
+    :vartype retain_for_lifetime_of_workspace: bool
+    :ivar artifact_location:
+    :vartype artifact_location: str
+    """
+
+    _attribute_map = {
+        'experiment_id': {'key': 'experimentId', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'archived_time': {'key': 'archivedTime', 'type': 'iso-8601'},
+        'retain_for_lifetime_of_workspace': {'key': 'retainForLifetimeOfWorkspace', 'type': 'bool'},
+        'artifact_location': {'key': 'artifactLocation', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        experiment_id: Optional[str] = None,
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        created_utc: Optional[datetime.datetime] = None,
+        tags: Optional[Dict[str, str]] = None,
+        archived_time: Optional[datetime.datetime] = None,
+        retain_for_lifetime_of_workspace: Optional[bool] = None,
+        artifact_location: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword experiment_id:
+        :paramtype experiment_id: str
+        :keyword name:
+        :paramtype name: str
+        :keyword description:
+        :paramtype description: str
+        :keyword created_utc:
+        :paramtype created_utc: ~datetime.datetime
+        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
+        :paramtype tags: dict[str, str]
+        :keyword archived_time:
+        :paramtype archived_time: ~datetime.datetime
+        :keyword retain_for_lifetime_of_workspace:
+        :paramtype retain_for_lifetime_of_workspace: bool
+        :keyword artifact_location:
+        :paramtype artifact_location: str
+        """
+        super(Experiment, self).__init__(**kwargs)
+        self.experiment_id = experiment_id
+        self.name = name
+        self.description = description
+        self.created_utc = created_utc
+        self.tags = tags
+        self.archived_time = archived_time
+        self.retain_for_lifetime_of_workspace = retain_for_lifetime_of_workspace
+        self.artifact_location = artifact_location
+
+
+class ExperimentQueryParams(msrest.serialization.Model):
+    """Extends Query Params DTO for ViewType.
+
+    :ivar view_type: ViewType filters experiments by their archived state. Default is ActiveOnly.
+     Possible values include: "Default", "All", "ActiveOnly", "ArchivedOnly".
+    :vartype view_type: str or ~azure.mgmt.machinelearningservices.models.ExperimentViewType
+    :ivar filter: Allows for filtering the collection of resources.
+     The expression specified is evaluated for each resource in the collection, and only items
+     where the expression evaluates to true are included in the response.
+     See https://learn.microsoft.com/azure/search/query-odata-filter-orderby-syntax for
+     details on the expression syntax.
+    :vartype filter: str
+    :ivar continuation_token: The continuation token to use for getting the next set of resources.
+    :vartype continuation_token: str
+    :ivar order_by: The comma separated list of resource properties to use for sorting the
+     requested resources.
+     Optionally, can be followed by either 'asc' or 'desc'.
+    :vartype order_by: str
+    :ivar top: The maximum number of items in the resource collection to be included in the result.
+     If not specified, all items are returned.
+    :vartype top: int
+    """
+
+    _attribute_map = {
+        'view_type': {'key': 'viewType', 'type': 'str'},
+        'filter': {'key': 'filter', 'type': 'str'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'order_by': {'key': 'orderBy', 'type': 'str'},
+        'top': {'key': 'top', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        *,
+        view_type: Optional[Union[str, "ExperimentViewType"]] = None,
+        filter: Optional[str] = None,
+        continuation_token: Optional[str] = None,
+        order_by: Optional[str] = None,
+        top: Optional[int] = None,
+        **kwargs
+    ):
+        """
+        :keyword view_type: ViewType filters experiments by their archived state. Default is
+         ActiveOnly. Possible values include: "Default", "All", "ActiveOnly", "ArchivedOnly".
+        :paramtype view_type: str or ~azure.mgmt.machinelearningservices.models.ExperimentViewType
+        :keyword filter: Allows for filtering the collection of resources.
+         The expression specified is evaluated for each resource in the collection, and only items
+         where the expression evaluates to true are included in the response.
+         See https://learn.microsoft.com/azure/search/query-odata-filter-orderby-syntax for
+         details on the expression syntax.
+        :paramtype filter: str
+        :keyword continuation_token: The continuation token to use for getting the next set of
+         resources.
+        :paramtype continuation_token: str
+        :keyword order_by: The comma separated list of resource properties to use for sorting the
+         requested resources.
+         Optionally, can be followed by either 'asc' or 'desc'.
+        :paramtype order_by: str
+        :keyword top: The maximum number of items in the resource collection to be included in the
+         result.
+         If not specified, all items are returned.
+        :paramtype top: int
+        """
+        super(ExperimentQueryParams, self).__init__(**kwargs)
+        self.view_type = view_type
+        self.filter = filter
+        self.continuation_token = continuation_token
+        self.order_by = order_by
+        self.top = top
+
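+# Illustrative sketch (not part of the generated models): query active experiments,
+# newest first, fifty at a time. The filter expression is a placeholder; a
+# continuation_token from a previous page would be passed in to fetch the next one.
+_example_experiment_query = ExperimentQueryParams(
+    view_type="ActiveOnly",
+    filter="tags/owner eq 'team-a'",
+    order_by="createdUtc desc",
+    top=50,
+)
+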
+
+class GetRunDataRequest(msrest.serialization.Model):
+    """GetRunDataRequest.
+
+    :ivar run_id:
+    :vartype run_id: str
+    :ivar select_run_metadata:
+    :vartype select_run_metadata: bool
+    :ivar select_run_definition:
+    :vartype select_run_definition: bool
+    :ivar select_job_specification:
+    :vartype select_job_specification: bool
+    """
+
+    _attribute_map = {
+        'run_id': {'key': 'runId', 'type': 'str'},
+        'select_run_metadata': {'key': 'selectRunMetadata', 'type': 'bool'},
+        'select_run_definition': {'key': 'selectRunDefinition', 'type': 'bool'},
+        'select_job_specification': {'key': 'selectJobSpecification', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        *,
+        run_id: Optional[str] = None,
+        select_run_metadata: Optional[bool] = None,
+        select_run_definition: Optional[bool] = None,
+        select_job_specification: Optional[bool] = None,
+        **kwargs
+    ):
+        """
+        :keyword run_id:
+        :paramtype run_id: str
+        :keyword select_run_metadata:
+        :paramtype select_run_metadata: bool
+        :keyword select_run_definition:
+        :paramtype select_run_definition: bool
+        :keyword select_job_specification:
+        :paramtype select_job_specification: bool
+        """
+        super(GetRunDataRequest, self).__init__(**kwargs)
+        self.run_id = run_id
+        self.select_run_metadata = select_run_metadata
+        self.select_run_definition = select_run_definition
+        self.select_job_specification = select_job_specification
+
+
+class GetRunDataResult(msrest.serialization.Model):
+    """GetRunDataResult.
+
+    :ivar run_metadata: The definition of a Run.
+    :vartype run_metadata: ~azure.mgmt.machinelearningservices.models.Run
+    :ivar run_definition: Anything.
+    :vartype run_definition: any
+    :ivar job_specification: Anything.
+    :vartype job_specification: any
+    """
+
+    _attribute_map = {
+        'run_metadata': {'key': 'runMetadata', 'type': 'Run'},
+        'run_definition': {'key': 'runDefinition', 'type': 'object'},
+        'job_specification': {'key': 'jobSpecification', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        *,
+        run_metadata: Optional["Run"] = None,
+        run_definition: Optional[Any] = None,
+        job_specification: Optional[Any] = None,
+        **kwargs
+    ):
+        """
+        :keyword run_metadata: The definition of a Run.
+        :paramtype run_metadata: ~azure.mgmt.machinelearningservices.models.Run
+        :keyword run_definition: Anything.
+        :paramtype run_definition: any
+        :keyword job_specification: Anything.
+        :paramtype job_specification: any
+        """
+        super(GetRunDataResult, self).__init__(**kwargs)
+        self.run_metadata = run_metadata
+        self.run_definition = run_definition
+        self.job_specification = job_specification
+
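+# Illustrative sketch (not part of the generated models): ask only for the pieces of
+# run data that are needed; the run id is a placeholder. The service would reply with
+# a GetRunDataResult whose run_metadata/run_definition fields are populated only for
+# the selected parts.
+_example_run_data_request = GetRunDataRequest(
+    run_id="example-run-id",
+    select_run_metadata=True,
+    select_run_definition=True,
+    select_job_specification=False,
+)
+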
+
+class GetRunsByIds(msrest.serialization.Model):
+    """GetRunsByIds.
+
+    :ivar run_ids:
+    :vartype run_ids: list[str]
+    """
+
+    _attribute_map = {
+        'run_ids': {'key': 'runIds', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        run_ids: Optional[List[str]] = None,
+        **kwargs
+    ):
+        """
+        :keyword run_ids:
+        :paramtype run_ids: list[str]
+        """
+        super(GetRunsByIds, self).__init__(**kwargs)
+        self.run_ids = run_ids
+
+
+class GetSampledMetricRequest(msrest.serialization.Model):
+    """GetSampledMetricRequest.
+
+    :ivar metric_name:
+    :vartype metric_name: str
+    :ivar metric_namespace:
+    :vartype metric_namespace: str
+    """
+
+    _attribute_map = {
+        'metric_name': {'key': 'metricName', 'type': 'str'},
+        'metric_namespace': {'key': 'metricNamespace', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        metric_name: Optional[str] = None,
+        metric_namespace: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword metric_name:
+        :paramtype metric_name: str
+        :keyword metric_namespace:
+        :paramtype metric_namespace: str
+        """
+        super(GetSampledMetricRequest, self).__init__(**kwargs)
+        self.metric_name = metric_name
+        self.metric_namespace = metric_namespace
+
+
+class IMetricV2(msrest.serialization.Model):
+    """Sequence of one or many values sharing a common  DataContainerId, Name, and Schema. Used only for Post Metrics.
+
+    :ivar data_container_id: Data container to which this Metric belongs.
+    :vartype data_container_id: str
+    :ivar name: Name identifying this Metric within the Data Container.
+    :vartype name: str
+    :ivar columns: Schema shared by all values under this Metric
+     Columns.Keys define the column names which are required for each MetricValue
+     Columns.Values define the type of the associated object for each column.
+    :vartype columns: dict[str, str or ~azure.mgmt.machinelearningservices.models.MetricValueType]
+    :ivar namespace: Namespace for this Metric.
+    :vartype namespace: str
+    :ivar standard_schema_id:
+    :vartype standard_schema_id: str
+    :ivar value: The list of values.
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.MetricV2Value]
+    """
+
+    _attribute_map = {
+        'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'columns': {'key': 'columns', 'type': '{str}'},
+        'namespace': {'key': 'namespace', 'type': 'str'},
+        'standard_schema_id': {'key': 'standardSchemaId', 'type': 'str'},
+        'value': {'key': 'value', 'type': '[MetricV2Value]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        data_container_id: Optional[str] = None,
+        name: Optional[str] = None,
+        columns: Optional[Dict[str, Union[str, "MetricValueType"]]] = None,
+        namespace: Optional[str] = None,
+        standard_schema_id: Optional[str] = None,
+        value: Optional[List["MetricV2Value"]] = None,
+        **kwargs
+    ):
+        """
+        :keyword data_container_id: Data container to which this Metric belongs.
+        :paramtype data_container_id: str
+        :keyword name: Name identifying this Metric within the Data Container.
+        :paramtype name: str
+        :keyword columns: Schema shared by all values under this Metric
+         Columns.Keys define the column names which are required for each MetricValue
+         Columns.Values define the type of the associated object for each column.
+        :paramtype columns: dict[str, str or
+         ~azure.mgmt.machinelearningservices.models.MetricValueType]
+        :keyword namespace: Namespace for this Metric.
+        :paramtype namespace: str
+        :keyword standard_schema_id:
+        :paramtype standard_schema_id: str
+        :keyword value: The list of values.
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.MetricV2Value]
+        """
+        super(IMetricV2, self).__init__(**kwargs)
+        self.data_container_id = data_container_id
+        self.name = name
+        self.columns = columns
+        self.namespace = namespace
+        self.standard_schema_id = standard_schema_id
+        self.value = value
+
+
+class InnerErrorResponse(msrest.serialization.Model):
+    """A nested structure of errors.
+
+    :ivar code: The error code.
+    :vartype code: str
+    :ivar inner_error: A nested structure of errors.
+    :vartype inner_error: ~azure.mgmt.machinelearningservices.models.InnerErrorResponse
+    """
+
+    _attribute_map = {
+        'code': {'key': 'code', 'type': 'str'},
+        'inner_error': {'key': 'innerError', 'type': 'InnerErrorResponse'},
+    }
+
+    def __init__(
+        self,
+        *,
+        code: Optional[str] = None,
+        inner_error: Optional["InnerErrorResponse"] = None,
+        **kwargs
+    ):
+        """
+        :keyword code: The error code.
+        :paramtype code: str
+        :keyword inner_error: A nested structure of errors.
+        :paramtype inner_error: ~azure.mgmt.machinelearningservices.models.InnerErrorResponse
+        """
+        super(InnerErrorResponse, self).__init__(**kwargs)
+        self.code = code
+        self.inner_error = inner_error
+
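+# Illustrative sketch (not part of the generated models): walk the nested
+# InnerErrorResponse chain to recover the most specific error code.
+def _innermost_error_code(error):
+    code = None
+    while error is not None:
+        code = error.code or code
+        error = error.inner_error
+    return code
+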
+
+class JobCost(msrest.serialization.Model):
+    """JobCost.
+
+    :ivar charged_cpu_core_seconds:
+    :vartype charged_cpu_core_seconds: float
+    :ivar charged_cpu_memory_megabyte_seconds:
+    :vartype charged_cpu_memory_megabyte_seconds: float
+    :ivar charged_gpu_seconds:
+    :vartype charged_gpu_seconds: float
+    :ivar charged_node_utilization_seconds:
+    :vartype charged_node_utilization_seconds: float
+    """
+
+    _attribute_map = {
+        'charged_cpu_core_seconds': {'key': 'chargedCpuCoreSeconds', 'type': 'float'},
+        'charged_cpu_memory_megabyte_seconds': {'key': 'chargedCpuMemoryMegabyteSeconds', 'type': 'float'},
+        'charged_gpu_seconds': {'key': 'chargedGpuSeconds', 'type': 'float'},
+        'charged_node_utilization_seconds': {'key': 'chargedNodeUtilizationSeconds', 'type': 'float'},
+    }
+
+    def __init__(
+        self,
+        *,
+        charged_cpu_core_seconds: Optional[float] = None,
+        charged_cpu_memory_megabyte_seconds: Optional[float] = None,
+        charged_gpu_seconds: Optional[float] = None,
+        charged_node_utilization_seconds: Optional[float] = None,
+        **kwargs
+    ):
+        """
+        :keyword charged_cpu_core_seconds:
+        :paramtype charged_cpu_core_seconds: float
+        :keyword charged_cpu_memory_megabyte_seconds:
+        :paramtype charged_cpu_memory_megabyte_seconds: float
+        :keyword charged_gpu_seconds:
+        :paramtype charged_gpu_seconds: float
+        :keyword charged_node_utilization_seconds:
+        :paramtype charged_node_utilization_seconds: float
+        """
+        super(JobCost, self).__init__(**kwargs)
+        self.charged_cpu_core_seconds = charged_cpu_core_seconds
+        self.charged_cpu_memory_megabyte_seconds = charged_cpu_memory_megabyte_seconds
+        self.charged_gpu_seconds = charged_gpu_seconds
+        self.charged_node_utilization_seconds = charged_node_utilization_seconds
+
+
+class KeyValuePairBaseEventErrorResponse(msrest.serialization.Model):
+    """KeyValuePairBaseEventErrorResponse.
+
+    :ivar key: Base event is the envelope used to post event data to the Event controller.
+    :vartype key: ~azure.mgmt.machinelearningservices.models.BaseEvent
+    :ivar value: The error response.
+    :vartype value: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+    """
+
+    _attribute_map = {
+        'key': {'key': 'key', 'type': 'BaseEvent'},
+        'value': {'key': 'value', 'type': 'ErrorResponse'},
+    }
+
+    def __init__(
+        self,
+        *,
+        key: Optional["BaseEvent"] = None,
+        value: Optional["ErrorResponse"] = None,
+        **kwargs
+    ):
+        """
+        :keyword key: Base event is the envelope used to post event data to the Event controller.
+        :paramtype key: ~azure.mgmt.machinelearningservices.models.BaseEvent
+        :keyword value: The error response.
+        :paramtype value: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+        """
+        super(KeyValuePairBaseEventErrorResponse, self).__init__(**kwargs)
+        self.key = key
+        self.value = value
+
+
+class KeyValuePairString(msrest.serialization.Model):
+    """KeyValuePairString.
+
+    :ivar key:
+    :vartype key: str
+    :ivar value:
+    :vartype value: str
+    """
+
+    _attribute_map = {
+        'key': {'key': 'key', 'type': 'str'},
+        'value': {'key': 'value', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        key: Optional[str] = None,
+        value: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword key:
+        :paramtype key: str
+        :keyword value:
+        :paramtype value: str
+        """
+        super(KeyValuePairString, self).__init__(**kwargs)
+        self.key = key
+        self.value = value
+
+
+class KeyValuePairStringJToken(msrest.serialization.Model):
+    """KeyValuePairStringJToken.
+
+    :ivar key:
+    :vartype key: str
+    :ivar value: Anything.
+    :vartype value: any
+    """
+
+    _attribute_map = {
+        'key': {'key': 'key', 'type': 'str'},
+        'value': {'key': 'value', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        *,
+        key: Optional[str] = None,
+        value: Optional[Any] = None,
+        **kwargs
+    ):
+        """
+        :keyword key:
+        :paramtype key: str
+        :keyword value: Anything.
+        :paramtype value: any
+        """
+        super(KeyValuePairStringJToken, self).__init__(**kwargs)
+        self.key = key
+        self.value = value
+
+
+class Link(msrest.serialization.Model):
+    """Link.
+
+    :ivar context:
+    :vartype context: ~azure.mgmt.machinelearningservices.models.SpanContext
+    :ivar attributes: Gets the collection of attributes associated with the link.
+    :vartype attributes: dict[str, any]
+    """
+
+    _attribute_map = {
+        'context': {'key': 'context', 'type': 'SpanContext'},
+        'attributes': {'key': 'attributes', 'type': '{object}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        context: Optional["SpanContext"] = None,
+        attributes: Optional[Dict[str, Any]] = None,
+        **kwargs
+    ):
+        """
+        :keyword context:
+        :paramtype context: ~azure.mgmt.machinelearningservices.models.SpanContext
+        :keyword attributes: Gets the collection of attributes associated with the link.
+        :paramtype attributes: dict[str, any]
+        """
+        super(Link, self).__init__(**kwargs)
+        self.context = context
+        self.attributes = attributes
+
+
+class ListGenericResourceMetrics(msrest.serialization.Model):
+    """ListGenericResourceMetrics.
+
+    :ivar resource_id:
+    :vartype resource_id: str
+    :ivar metric_names:
+    :vartype metric_names: list[str]
+    :ivar label_filters: Dictionary of :code:`<string>`.
+    :vartype label_filters: dict[str, str]
+    :ivar metric_namespace:
+    :vartype metric_namespace: str
+    :ivar continuation_token:
+    :vartype continuation_token: str
+    """
+
+    _attribute_map = {
+        'resource_id': {'key': 'resourceId', 'type': 'str'},
+        'metric_names': {'key': 'metricNames', 'type': '[str]'},
+        'label_filters': {'key': 'labelFilters', 'type': '{str}'},
+        'metric_namespace': {'key': 'metricNamespace', 'type': 'str'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        resource_id: Optional[str] = None,
+        metric_names: Optional[List[str]] = None,
+        label_filters: Optional[Dict[str, str]] = None,
+        metric_namespace: Optional[str] = None,
+        continuation_token: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword resource_id:
+        :paramtype resource_id: str
+        :keyword metric_names:
+        :paramtype metric_names: list[str]
+        :keyword label_filters: Dictionary of :code:`<string>`.
+        :paramtype label_filters: dict[str, str]
+        :keyword metric_namespace:
+        :paramtype metric_namespace: str
+        :keyword continuation_token:
+        :paramtype continuation_token: str
+        """
+        super(ListGenericResourceMetrics, self).__init__(**kwargs)
+        self.resource_id = resource_id
+        self.metric_names = metric_names
+        self.label_filters = label_filters
+        self.metric_namespace = metric_namespace
+        self.continuation_token = continuation_token
+
+
+class ListMetrics(msrest.serialization.Model):
+    """ListMetrics.
+
+    :ivar metric_namespace:
+    :vartype metric_namespace: str
+    :ivar continuation_token:
+    :vartype continuation_token: str
+    """
+
+    _attribute_map = {
+        'metric_namespace': {'key': 'metricNamespace', 'type': 'str'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        metric_namespace: Optional[str] = None,
+        continuation_token: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword metric_namespace:
+        :paramtype metric_namespace: str
+        :keyword continuation_token:
+        :paramtype continuation_token: str
+        """
+        super(ListMetrics, self).__init__(**kwargs)
+        self.metric_namespace = metric_namespace
+        self.continuation_token = continuation_token
+
+
+class MetricDefinition(msrest.serialization.Model):
+    """MetricDefinition.
+
+    :ivar metric_key:
+    :vartype metric_key: ~azure.mgmt.machinelearningservices.models.DerivedMetricKey
+    :ivar columns: Dictionary of :code:`<MetricValueType>`.
+    :vartype columns: dict[str, str or ~azure.mgmt.machinelearningservices.models.MetricValueType]
+    :ivar properties:
+    :vartype properties: ~azure.mgmt.machinelearningservices.models.MetricProperties
+    """
+
+    _attribute_map = {
+        'metric_key': {'key': 'metricKey', 'type': 'DerivedMetricKey'},
+        'columns': {'key': 'columns', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': 'MetricProperties'},
+    }
+
+    def __init__(
+        self,
+        *,
+        metric_key: Optional["DerivedMetricKey"] = None,
+        columns: Optional[Dict[str, Union[str, "MetricValueType"]]] = None,
+        properties: Optional["MetricProperties"] = None,
+        **kwargs
+    ):
+        """
+        :keyword metric_key:
+        :paramtype metric_key: ~azure.mgmt.machinelearningservices.models.DerivedMetricKey
+        :keyword columns: Dictionary of :code:`<MetricValueType>`.
+        :paramtype columns: dict[str, str or
+         ~azure.mgmt.machinelearningservices.models.MetricValueType]
+        :keyword properties:
+        :paramtype properties: ~azure.mgmt.machinelearningservices.models.MetricProperties
+        """
+        super(MetricDefinition, self).__init__(**kwargs)
+        self.metric_key = metric_key
+        self.columns = columns
+        self.properties = properties
+
+
+class MetricProperties(msrest.serialization.Model):
+    """MetricProperties.
+
+    :ivar ux_metric_type: String value the UX uses to decide how to render your metrics,
+     e.g. azureml.v1.scalar or azureml.v1.table.
+    :vartype ux_metric_type: str
+    """
+
+    _attribute_map = {
+        'ux_metric_type': {'key': 'uxMetricType', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        ux_metric_type: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword ux_metric_type: String value the UX uses to decide how to render your metrics,
+         e.g. azureml.v1.scalar or azureml.v1.table.
+        :paramtype ux_metric_type: str
+        """
+        super(MetricProperties, self).__init__(**kwargs)
+        self.ux_metric_type = ux_metric_type
+
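+# Illustrative sketch (not part of the generated models): a metric definition pairing a
+# DerivedMetricKey with a column schema and a UX rendering hint. The column type string
+# is a placeholder for a MetricValueType value; azureml.v1.scalar follows the example
+# given in MetricProperties above.
+_example_metric_definition = MetricDefinition(
+    metric_key=DerivedMetricKey(namespace="example-namespace", name="loss", column_names=["loss"]),
+    columns={"loss": "Double"},  # placeholder column type
+    properties=MetricProperties(ux_metric_type="azureml.v1.scalar"),
+)
+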
+
+class MetricSample(msrest.serialization.Model):
+    """MetricSample.
+
+    :ivar derived_label_values: Dictionary of :code:`<string>`.
+    :vartype derived_label_values: dict[str, str]
+    :ivar is_partial_result:
+    :vartype is_partial_result: bool
+    :ivar num_values_logged:
+    :vartype num_values_logged: long
+    :ivar data_container_id: Data container to which this Metric belongs.
+    :vartype data_container_id: str
+    :ivar name: Name identifying this Metric within the Data Container.
+    :vartype name: str
+    :ivar columns: Schema shared by all values under this Metric
+     Columns.Keys define the column names which are required for each MetricValue
+     Columns.Values define the type of the associated object for each column.
+    :vartype columns: dict[str, str or ~azure.mgmt.machinelearningservices.models.MetricValueType]
+    :ivar properties:
+    :vartype properties: ~azure.mgmt.machinelearningservices.models.MetricProperties
+    :ivar namespace: Namespace for this Metric.
+    :vartype namespace: str
+    :ivar standard_schema_id:
+    :vartype standard_schema_id: str
+    :ivar value:
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.MetricV2Value]
+    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
+     additional pages.
+    :vartype continuation_token: str
+    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
+     there are no additional pages.
+    :vartype next_link: str
+    """
+
+    _attribute_map = {
+        'derived_label_values': {'key': 'derivedLabelValues', 'type': '{str}'},
+        'is_partial_result': {'key': 'isPartialResult', 'type': 'bool'},
+        'num_values_logged': {'key': 'numValuesLogged', 'type': 'long'},
+        'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'columns': {'key': 'columns', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': 'MetricProperties'},
+        'namespace': {'key': 'namespace', 'type': 'str'},
+        'standard_schema_id': {'key': 'standardSchemaId', 'type': 'str'},
+        'value': {'key': 'value', 'type': '[MetricV2Value]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        derived_label_values: Optional[Dict[str, str]] = None,
+        is_partial_result: Optional[bool] = None,
+        num_values_logged: Optional[int] = None,
+        data_container_id: Optional[str] = None,
+        name: Optional[str] = None,
+        columns: Optional[Dict[str, Union[str, "MetricValueType"]]] = None,
+        properties: Optional["MetricProperties"] = None,
+        namespace: Optional[str] = None,
+        standard_schema_id: Optional[str] = None,
+        value: Optional[List["MetricV2Value"]] = None,
+        continuation_token: Optional[str] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword derived_label_values: Dictionary of :code:`<string>`.
+        :paramtype derived_label_values: dict[str, str]
+        :keyword is_partial_result:
+        :paramtype is_partial_result: bool
+        :keyword num_values_logged:
+        :paramtype num_values_logged: long
+        :keyword data_container_id: Data container to which this Metric belongs.
+        :paramtype data_container_id: str
+        :keyword name: Name identifying this Metric within the Data Container.
+        :paramtype name: str
+        :keyword columns: Schema shared by all values under this Metric
+         Columns.Keys define the column names which are required for each MetricValue
+         Columns.Values define the type of the associated object for each column.
+        :paramtype columns: dict[str, str or
+         ~azure.mgmt.machinelearningservices.models.MetricValueType]
+        :keyword properties:
+        :paramtype properties: ~azure.mgmt.machinelearningservices.models.MetricProperties
+        :keyword namespace: Namespace for this Metric.
+        :paramtype namespace: str
+        :keyword standard_schema_id:
+        :paramtype standard_schema_id: str
+        :keyword value:
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.MetricV2Value]
+        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
+         additional pages.
+        :paramtype continuation_token: str
+        :keyword next_link: The link to the next page constructed using the continuationToken.  If
+         null, there are no additional pages.
+        :paramtype next_link: str
+        """
+        super(MetricSample, self).__init__(**kwargs)
+        self.derived_label_values = derived_label_values
+        self.is_partial_result = is_partial_result
+        self.num_values_logged = num_values_logged
+        self.data_container_id = data_container_id
+        self.name = name
+        self.columns = columns
+        self.properties = properties
+        self.namespace = namespace
+        self.standard_schema_id = standard_schema_id
+        self.value = value
+        self.continuation_token = continuation_token
+        self.next_link = next_link
+
+
+class MetricSchema(msrest.serialization.Model):
+    """MetricSchema.
+
+    :ivar num_properties:
+    :vartype num_properties: int
+    :ivar properties:
+    :vartype properties: list[~azure.mgmt.machinelearningservices.models.MetricSchemaProperty]
+    """
+
+    _attribute_map = {
+        'num_properties': {'key': 'numProperties', 'type': 'int'},
+        'properties': {'key': 'properties', 'type': '[MetricSchemaProperty]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        num_properties: Optional[int] = None,
+        properties: Optional[List["MetricSchemaProperty"]] = None,
+        **kwargs
+    ):
+        """
+        :keyword num_properties:
+        :paramtype num_properties: int
+        :keyword properties:
+        :paramtype properties: list[~azure.mgmt.machinelearningservices.models.MetricSchemaProperty]
+        """
+        super(MetricSchema, self).__init__(**kwargs)
+        self.num_properties = num_properties
+        self.properties = properties
+
+
+class MetricSchemaProperty(msrest.serialization.Model):
+    """MetricSchemaProperty.
+
+    :ivar property_id:
+    :vartype property_id: str
+    :ivar name:
+    :vartype name: str
+    :ivar type:
+    :vartype type: str
+    """
+
+    _attribute_map = {
+        'property_id': {'key': 'propertyId', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        property_id: Optional[str] = None,
+        name: Optional[str] = None,
+        type: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword property_id:
+        :paramtype property_id: str
+        :keyword name:
+        :paramtype name: str
+        :keyword type:
+        :paramtype type: str
+        """
+        super(MetricSchemaProperty, self).__init__(**kwargs)
+        self.property_id = property_id
+        self.name = name
+        self.type = type
+
+
+class MetricV2(msrest.serialization.Model):
+    """Sequence of one or many values sharing a common DataContainerId, Name, and Schema.
+
+    :ivar data_container_id: Data container to which this Metric belongs.
+    :vartype data_container_id: str
+    :ivar name: Name identifying this Metric within the Data Container.
+    :vartype name: str
+    :ivar columns: Schema shared by all values under this Metric
+     Columns.Keys define the column names which are required for each MetricValue
+     Columns.Values define the type of the associated object for each column.
+    :vartype columns: dict[str, str or ~azure.mgmt.machinelearningservices.models.MetricValueType]
+    :ivar properties:
+    :vartype properties: ~azure.mgmt.machinelearningservices.models.MetricProperties
+    :ivar namespace: Namespace for this Metric.
+    :vartype namespace: str
+    :ivar standard_schema_id:
+    :vartype standard_schema_id: str
+    :ivar value:
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.MetricV2Value]
+    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
+     additional pages.
+    :vartype continuation_token: str
+    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
+     there are no additional pages.
+    :vartype next_link: str
+    """
+
+    _attribute_map = {
+        'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'columns': {'key': 'columns', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': 'MetricProperties'},
+        'namespace': {'key': 'namespace', 'type': 'str'},
+        'standard_schema_id': {'key': 'standardSchemaId', 'type': 'str'},
+        'value': {'key': 'value', 'type': '[MetricV2Value]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        data_container_id: Optional[str] = None,
+        name: Optional[str] = None,
+        columns: Optional[Dict[str, Union[str, "MetricValueType"]]] = None,
+        properties: Optional["MetricProperties"] = None,
+        namespace: Optional[str] = None,
+        standard_schema_id: Optional[str] = None,
+        value: Optional[List["MetricV2Value"]] = None,
+        continuation_token: Optional[str] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword data_container_id: Data container to which this Metric belongs.
+        :paramtype data_container_id: str
+        :keyword name: Name identifying this Metric within the Data Container.
+        :paramtype name: str
+        :keyword columns: Schema shared by all values under this Metric
+         Columns.Keys define the column names which are required for each MetricValue
+         Columns.Values define the type of the associated object for each column.
+        :paramtype columns: dict[str, str or
+         ~azure.mgmt.machinelearningservices.models.MetricValueType]
+        :keyword properties:
+        :paramtype properties: ~azure.mgmt.machinelearningservices.models.MetricProperties
+        :keyword namespace: Namespace for this Metric.
+        :paramtype namespace: str
+        :keyword standard_schema_id:
+        :paramtype standard_schema_id: str
+        :keyword value:
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.MetricV2Value]
+        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
+         additional pages.
+        :paramtype continuation_token: str
+        :keyword next_link: The link to the next page constructed using the continuationToken.  If
+         null, there are no additional pages.
+        :paramtype next_link: str
+        """
+        super(MetricV2, self).__init__(**kwargs)
+        self.data_container_id = data_container_id
+        self.name = name
+        self.columns = columns
+        self.properties = properties
+        self.namespace = namespace
+        self.standard_schema_id = standard_schema_id
+        self.value = value
+        self.continuation_token = continuation_token
+        self.next_link = next_link
+
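+# Illustrative sketch (not part of the generated models): page through MetricV2 results.
+# `fetch_metric_page` is a hypothetical callable standing in for whatever operation
+# returns a MetricV2 for a given continuation token; paging stops when the token is null.
+def _walk_metric_values(fetch_metric_page):
+    token = None
+    while True:
+        page = fetch_metric_page(continuation_token=token)  # returns a MetricV2
+        for row in page.value or []:
+            yield row
+        token = page.continuation_token
+        if not token:  # no further pages
+            break
+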
+
+class MetricV2Value(msrest.serialization.Model):
+    """An individual value logged within a Metric.
+
+    :ivar metric_id: Unique Id for this metric value.
+     Format is either a Guid, or a Guid augmented with an additional int index for cases where
+     multiple metric values shared a MetricId in the old schema.
+    :vartype metric_id: str
+    :ivar created_utc: Client specified timestamp for this metric value.
+    :vartype created_utc: ~datetime.datetime
+    :ivar step:
+    :vartype step: long
+    :ivar data: Dictionary mapping column names (specified as the keys in MetricV2Dto.Columns) to
+     values expressed in the type associated with that column in the metric's schema.
+    :vartype data: dict[str, any]
+    """
+
+    _attribute_map = {
+        'metric_id': {'key': 'metricId', 'type': 'str'},
+        'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'},
+        'step': {'key': 'step', 'type': 'long'},
+        'data': {'key': 'data', 'type': '{object}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        metric_id: Optional[str] = None,
+        created_utc: Optional[datetime.datetime] = None,
+        step: Optional[int] = None,
+        data: Optional[Dict[str, Any]] = None,
+        **kwargs
+    ):
+        """
+        :keyword metric_id: Unique Id for this metric value.
+         Format is either a Guid, or a Guid augmented with an additional int index for cases where
+         multiple metric values shared a MetricId in the old schema.
+        :paramtype metric_id: str
+        :keyword created_utc: Client specified timestamp for this metric value.
+        :paramtype created_utc: ~datetime.datetime
+        :keyword step:
+        :paramtype step: long
+        :keyword data: Dictionary mapping column names (specified as the keys in MetricV2Dto.Columns)
+         to values expressed in the type associated with that column in the metric's schema.
+        :paramtype data: dict[str, any]
+        """
+        super(MetricV2Value, self).__init__(**kwargs)
+        self.metric_id = metric_id
+        self.created_utc = created_utc
+        self.step = step
+        self.data = data
+
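+# Illustrative sketch (not part of the generated models): a post-metrics payload. The
+# keys of `columns` name the columns every value row must provide, and each value's
+# `data` dict is keyed by those same column names. Ids, names, and the column type
+# string are placeholders.
+_example_posted_metric = IMetricV2(
+    data_container_id="example-container",
+    name="loss",
+    namespace="example-namespace",
+    columns={"loss": "Double"},  # placeholder MetricValueType value
+    value=[MetricV2Value(step=1, data={"loss": 0.42})],
+)
+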
+
+class ModifyExperiment(msrest.serialization.Model):
+    """ModifyExperiment.
+
+    :ivar name:
+    :vartype name: str
+    :ivar description:
+    :vartype description: str
+    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
+    :vartype tags: dict[str, str]
+    :ivar archive:
+    :vartype archive: bool
+    :ivar retain_for_lifetime_of_workspace:
+    :vartype retain_for_lifetime_of_workspace: bool
+    """
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'archive': {'key': 'archive', 'type': 'bool'},
+        'retain_for_lifetime_of_workspace': {'key': 'retainForLifetimeOfWorkspace', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        *,
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        tags: Optional[Dict[str, str]] = None,
+        archive: Optional[bool] = None,
+        retain_for_lifetime_of_workspace: Optional[bool] = None,
+        **kwargs
+    ):
+        """
+        :keyword name:
+        :paramtype name: str
+        :keyword description:
+        :paramtype description: str
+        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
+        :paramtype tags: dict[str, str]
+        :keyword archive:
+        :paramtype archive: bool
+        :keyword retain_for_lifetime_of_workspace:
+        :paramtype retain_for_lifetime_of_workspace: bool
+        """
+        super(ModifyExperiment, self).__init__(**kwargs)
+        self.name = name
+        self.description = description
+        self.tags = tags
+        self.archive = archive
+        self.retain_for_lifetime_of_workspace = retain_for_lifetime_of_workspace
+
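+# Illustrative sketch (not part of the generated models): archive an experiment while
+# updating its description and tags; the values are placeholders.
+_example_experiment_update = ModifyExperiment(
+    description="Superseded by the v2 pipeline",
+    tags={"status": "archived"},
+    archive=True,
+)
+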
+
+class OutputDatasetLineage(msrest.serialization.Model):
+    """OutputDatasetLineage.
+
+    :ivar identifier:
+    :vartype identifier: ~azure.mgmt.machinelearningservices.models.DatasetIdentifier
+    :ivar output_type: Possible values include: "RunOutput", "Reference".
+    :vartype output_type: str or ~azure.mgmt.machinelearningservices.models.DatasetOutputType
+    :ivar output_details:
+    :vartype output_details: ~azure.mgmt.machinelearningservices.models.DatasetOutputDetails
+    """
+
+    _attribute_map = {
+        'identifier': {'key': 'identifier', 'type': 'DatasetIdentifier'},
+        'output_type': {'key': 'outputType', 'type': 'str'},
+        'output_details': {'key': 'outputDetails', 'type': 'DatasetOutputDetails'},
+    }
+
+    def __init__(
+        self,
+        *,
+        identifier: Optional["DatasetIdentifier"] = None,
+        output_type: Optional[Union[str, "DatasetOutputType"]] = None,
+        output_details: Optional["DatasetOutputDetails"] = None,
+        **kwargs
+    ):
+        """
+        :keyword identifier:
+        :paramtype identifier: ~azure.mgmt.machinelearningservices.models.DatasetIdentifier
+        :keyword output_type: Possible values include: "RunOutput", "Reference".
+        :paramtype output_type: str or ~azure.mgmt.machinelearningservices.models.DatasetOutputType
+        :keyword output_details:
+        :paramtype output_details: ~azure.mgmt.machinelearningservices.models.DatasetOutputDetails
+        """
+        super(OutputDatasetLineage, self).__init__(**kwargs)
+        self.identifier = identifier
+        self.output_type = output_type
+        self.output_details = output_details
+
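+# Illustrative usage only: the minimal shape of an OutputDatasetLineage. The
+# DatasetIdentifier and DatasetOutputDetails types referenced by this model are
+# defined elsewhere in this module; output_type is expected to be one of the
+# DatasetOutputType values listed above ("RunOutput" or "Reference").
+#
+#     lineage = OutputDatasetLineage(output_type="RunOutput")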
+
+class PaginatedArtifactContentInformationList(msrest.serialization.Model):
+    """A paginated list of ArtifactContentInformations.
+
+    :ivar value: An array of objects of type ArtifactContentInformation.
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.ArtifactContentInformation]
+    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
+     additional pages.
+    :vartype continuation_token: str
+    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
+     there are no additional pages.
+    :vartype next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[ArtifactContentInformation]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["ArtifactContentInformation"]] = None,
+        continuation_token: Optional[str] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword value: An array of objects of type ArtifactContentInformation.
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.ArtifactContentInformation]
+        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
+         additional pages.
+        :paramtype continuation_token: str
+        :keyword next_link: The link to the next page constructed using the continuationToken.  If
+         null, there are no additional pages.
+        :paramtype next_link: str
+        """
+        super(PaginatedArtifactContentInformationList, self).__init__(**kwargs)
+        self.value = value
+        self.continuation_token = continuation_token
+        self.next_link = next_link
+
+
+class PaginatedArtifactList(msrest.serialization.Model):
+    """A paginated list of Artifacts.
+
+    :ivar value: An array of objects of type Artifact.
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.Artifact]
+    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
+     additional pages.
+    :vartype continuation_token: str
+    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
+     there are no additional pages.
+    :vartype next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[Artifact]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["Artifact"]] = None,
+        continuation_token: Optional[str] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword value: An array of objects of type Artifact.
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.Artifact]
+        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
+         additional pages.
+        :paramtype continuation_token: str
+        :keyword next_link: The link to the next page constructed using the continuationToken.  If
+         null, there are no additional pages.
+        :paramtype next_link: str
+        """
+        super(PaginatedArtifactList, self).__init__(**kwargs)
+        self.value = value
+        self.continuation_token = continuation_token
+        self.next_link = next_link
+
+
+class PaginatedExperimentList(msrest.serialization.Model):
+    """A paginated list of Experiments.
+
+    :ivar value: An array of objects of type Experiment.
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.Experiment]
+    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
+     additional pages.
+    :vartype continuation_token: str
+    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
+     there are no additional pages.
+    :vartype next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[Experiment]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["Experiment"]] = None,
+        continuation_token: Optional[str] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword value: An array of objects of type Experiment.
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.Experiment]
+        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
+         additional pages.
+        :paramtype continuation_token: str
+        :keyword next_link: The link to the next page constructed using the continuationToken.  If
+         null, there are no additional pages.
+        :paramtype next_link: str
+        """
+        super(PaginatedExperimentList, self).__init__(**kwargs)
+        self.value = value
+        self.continuation_token = continuation_token
+        self.next_link = next_link
+
+
+class PaginatedMetricDefinitionList(msrest.serialization.Model):
+    """A paginated list of MetricDefinitions.
+
+    :ivar value: An array of objects of type MetricDefinition.
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.MetricDefinition]
+    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
+     additional pages.
+    :vartype continuation_token: str
+    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
+     there are no additional pages.
+    :vartype next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[MetricDefinition]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["MetricDefinition"]] = None,
+        continuation_token: Optional[str] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword value: An array of objects of type MetricDefinition.
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.MetricDefinition]
+        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
+         additional pages.
+        :paramtype continuation_token: str
+        :keyword next_link: The link to the next page constructed using the continuationToken.  If
+         null, there are no additional pages.
+        :paramtype next_link: str
+        """
+        super(PaginatedMetricDefinitionList, self).__init__(**kwargs)
+        self.value = value
+        self.continuation_token = continuation_token
+        self.next_link = next_link
+
+
+class PaginatedRunList(msrest.serialization.Model):
+    """A paginated list of Runs.
+
+    :ivar value: An array of objects of type Run.
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.Run]
+    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
+     additional pages.
+    :vartype continuation_token: str
+    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
+     there are no additional pages.
+    :vartype next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[Run]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["Run"]] = None,
+        continuation_token: Optional[str] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword value: An array of objects of type Run.
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.Run]
+        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
+         additional pages.
+        :paramtype continuation_token: str
+        :keyword next_link: The link to the next page constructed using the continuationToken.  If
+         null, there are no additional pages.
+        :paramtype next_link: str
+        """
+        super(PaginatedRunList, self).__init__(**kwargs)
+        self.value = value
+        self.continuation_token = continuation_token
+        self.next_link = next_link
+
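+# Illustrative sketch of the pagination contract shared by the Paginated*List
+# models in this module: keep requesting pages until continuation_token comes
+# back empty. ``fetch_page`` is a hypothetical callable standing in for whichever
+# operation returns a PaginatedRunList.
+#
+#     def iter_runs(fetch_page):
+#         token = None
+#         while True:
+#             page = fetch_page(continuation_token=token)
+#             for run in page.value or []:
+#                 yield run
+#             token = page.continuation_token
+#             if not token:
+#                 break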
+
+class PaginatedSpanDefinition1List(msrest.serialization.Model):
+    """A paginated list of SpanDefinition`1s.
+
+    :ivar value: An array of objects of type SpanDefinition`1.
+    :vartype value: list[~azure.mgmt.machinelearningservices.models.SpanDefinition1]
+    :ivar continuation_token: The token used in retrieving the next page. If null, there are no
+     additional pages.
+    :vartype continuation_token: str
+    :ivar next_link: The link to the next page constructed using the continuationToken.  If null,
+     there are no additional pages.
+    :vartype next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[SpanDefinition1]'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["SpanDefinition1"]] = None,
+        continuation_token: Optional[str] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword value: An array of objects of type SpanDefinition`1.
+        :paramtype value: list[~azure.mgmt.machinelearningservices.models.SpanDefinition1]
+        :keyword continuation_token: The token used in retrieving the next page. If null, there are no
+         additional pages.
+        :paramtype continuation_token: str
+        :keyword next_link: The link to the next page constructed using the continuationToken.  If
+         null, there are no additional pages.
+        :paramtype next_link: str
+        """
+        super(PaginatedSpanDefinition1List, self).__init__(**kwargs)
+        self.value = value
+        self.continuation_token = continuation_token
+        self.next_link = next_link
+
+
+class PostRunMetricsError(msrest.serialization.Model):
+    """PostRunMetricsError.
+
+    :ivar metric: Sequence of one or many values sharing a common DataContainerId, Name, and
+     Schema. Used only for Post Metrics.
+    :vartype metric: ~azure.mgmt.machinelearningservices.models.IMetricV2
+    :ivar error_response: The error response.
+    :vartype error_response: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+    """
+
+    _attribute_map = {
+        'metric': {'key': 'metric', 'type': 'IMetricV2'},
+        'error_response': {'key': 'errorResponse', 'type': 'ErrorResponse'},
+    }
+
+    def __init__(
+        self,
+        *,
+        metric: Optional["IMetricV2"] = None,
+        error_response: Optional["ErrorResponse"] = None,
+        **kwargs
+    ):
+        """
+        :keyword metric: Sequence of one or many values sharing a common DataContainerId, Name, and
+         Schema. Used only for Post Metrics.
+        :paramtype metric: ~azure.mgmt.machinelearningservices.models.IMetricV2
+        :keyword error_response: The error response.
+        :paramtype error_response: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+        """
+        super(PostRunMetricsError, self).__init__(**kwargs)
+        self.metric = metric
+        self.error_response = error_response
+
+
+class PostRunMetricsResult(msrest.serialization.Model):
+    """PostRunMetricsResult.
+
+    :ivar errors:
+    :vartype errors: list[~azure.mgmt.machinelearningservices.models.PostRunMetricsError]
+    """
+
+    _attribute_map = {
+        'errors': {'key': 'errors', 'type': '[PostRunMetricsError]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        errors: Optional[List["PostRunMetricsError"]] = None,
+        **kwargs
+    ):
+        """
+        :keyword errors:
+        :paramtype errors: list[~azure.mgmt.machinelearningservices.models.PostRunMetricsError]
+        """
+        super(PostRunMetricsResult, self).__init__(**kwargs)
+        self.errors = errors
+
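+# Illustrative usage only: inspecting a PostRunMetricsResult returned from a
+# batch metric post. Each PostRunMetricsError pairs the rejected metric with the
+# service's ErrorResponse, so failures can be reported per metric.
+#
+#     def failed_metrics(result):
+#         return [(e.metric, e.error_response) for e in (result.errors or [])]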
+
+class QueryParams(msrest.serialization.Model):
+    """The set of supported filters.
+
+    :ivar filter: Allows for filtering the collection of resources.
+     The expression specified is evaluated for each resource in the collection, and only items
+     where the expression evaluates to true are included in the response.
+     See https://learn.microsoft.com/azure/search/query-odata-filter-orderby-syntax for
+     details on the expression syntax.
+    :vartype filter: str
+    :ivar continuation_token: The continuation token to use for getting the next set of resources.
+    :vartype continuation_token: str
+    :ivar order_by: The comma separated list of resource properties to use for sorting the
+     requested resources.
+     Optionally, can be followed by either 'asc' or 'desc'.
+    :vartype order_by: str
+    :ivar top: The maximum number of items in the resource collection to be included in the result.
+     If not specified, all items are returned.
+    :vartype top: int
+    """
+
+    _attribute_map = {
+        'filter': {'key': 'filter', 'type': 'str'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'order_by': {'key': 'orderBy', 'type': 'str'},
+        'top': {'key': 'top', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        *,
+        filter: Optional[str] = None,
+        continuation_token: Optional[str] = None,
+        order_by: Optional[str] = None,
+        top: Optional[int] = None,
+        **kwargs
+    ):
+        """
+        :keyword filter: Allows for filtering the collection of resources.
+         The expression specified is evaluated for each resource in the collection, and only items
+         where the expression evaluates to true are included in the response.
+         See https://learn.microsoft.com/azure/search/query-odata-filter-orderby-syntax for
+         details on the expression syntax.
+        :paramtype filter: str
+        :keyword continuation_token: The continuation token to use for getting the next set of
+         resources.
+        :paramtype continuation_token: str
+        :keyword order_by: The comma separated list of resource properties to use for sorting the
+         requested resources.
+         Optionally, can be followed by either 'asc' or 'desc'.
+        :paramtype order_by: str
+        :keyword top: The maximum number of items in the resource collection to be included in the
+         result.
+         If not specified, all items are returned.
+        :paramtype top: int
+        """
+        super(QueryParams, self).__init__(**kwargs)
+        self.filter = filter
+        self.continuation_token = continuation_token
+        self.order_by = order_by
+        self.top = top
+
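+# Illustrative usage only: a QueryParams that filters on run status, orders
+# newest first, and caps each page at 50 items. The property names inside the
+# filter and order_by strings are examples, not an exhaustive list of what the
+# service accepts.
+#
+#     query = QueryParams(
+#         filter="status eq 'Completed'",
+#         order_by="createdUtc desc",
+#         top=50,
+#     )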
+
+class QueueingInfo(msrest.serialization.Model):
+    """QueueingInfo.
+
+    :ivar code:
+    :vartype code: str
+    :ivar message:
+    :vartype message: str
+    :ivar last_refresh_timestamp:
+    :vartype last_refresh_timestamp: ~datetime.datetime
+    """
+
+    _attribute_map = {
+        'code': {'key': 'code', 'type': 'str'},
+        'message': {'key': 'message', 'type': 'str'},
+        'last_refresh_timestamp': {'key': 'lastRefreshTimestamp', 'type': 'iso-8601'},
+    }
+
+    def __init__(
+        self,
+        *,
+        code: Optional[str] = None,
+        message: Optional[str] = None,
+        last_refresh_timestamp: Optional[datetime.datetime] = None,
+        **kwargs
+    ):
+        """
+        :keyword code:
+        :paramtype code: str
+        :keyword message:
+        :paramtype message: str
+        :keyword last_refresh_timestamp:
+        :paramtype last_refresh_timestamp: ~datetime.datetime
+        """
+        super(QueueingInfo, self).__init__(**kwargs)
+        self.code = code
+        self.message = message
+        self.last_refresh_timestamp = last_refresh_timestamp
+
+
+class RetrieveFullFidelityMetricRequest(msrest.serialization.Model):
+    """RetrieveFullFidelityMetricRequest.
+
+    :ivar metric_name:
+    :vartype metric_name: str
+    :ivar continuation_token:
+    :vartype continuation_token: str
+    :ivar start_time:
+    :vartype start_time: ~datetime.datetime
+    :ivar end_time:
+    :vartype end_time: ~datetime.datetime
+    :ivar metric_namespace:
+    :vartype metric_namespace: str
+    """
+
+    _attribute_map = {
+        'metric_name': {'key': 'metricName', 'type': 'str'},
+        'continuation_token': {'key': 'continuationToken', 'type': 'str'},
+        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
+        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
+        'metric_namespace': {'key': 'metricNamespace', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        metric_name: Optional[str] = None,
+        continuation_token: Optional[str] = None,
+        start_time: Optional[datetime.datetime] = None,
+        end_time: Optional[datetime.datetime] = None,
+        metric_namespace: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword metric_name:
+        :paramtype metric_name: str
+        :keyword continuation_token:
+        :paramtype continuation_token: str
+        :keyword start_time:
+        :paramtype start_time: ~datetime.datetime
+        :keyword end_time:
+        :paramtype end_time: ~datetime.datetime
+        :keyword metric_namespace:
+        :paramtype metric_namespace: str
+        """
+        super(RetrieveFullFidelityMetricRequest, self).__init__(**kwargs)
+        self.metric_name = metric_name
+        self.continuation_token = continuation_token
+        self.start_time = start_time
+        self.end_time = end_time
+        self.metric_namespace = metric_namespace
+
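+# Illustrative usage only: requesting full-fidelity values for one metric over a
+# time window; the metric name and window below are placeholders.
+#
+#     import datetime
+#     request = RetrieveFullFidelityMetricRequest(
+#         metric_name="accuracy",
+#         start_time=datetime.datetime(2024, 1, 1),
+#         end_time=datetime.datetime(2024, 1, 2),
+#     )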
+
+class RootError(msrest.serialization.Model):
+    """The root error.
+
+    :ivar code: The service-defined error code. Supported error codes: ServiceError, UserError,
+     ValidationError, AzureStorageError, TransientError, RequestThrottled.
+    :vartype code: str
+    :ivar severity: The severity of the error.
+    :vartype severity: int
+    :ivar message: A human-readable representation of the error.
+    :vartype message: str
+    :ivar message_format: An unformatted version of the message with no variable substitution.
+    :vartype message_format: str
+    :ivar message_parameters: Value substitutions corresponding to the contents of MessageFormat.
+    :vartype message_parameters: dict[str, str]
+    :ivar reference_code: This code can optionally be set by the system generating the error.
+     It should be used to classify the problem and identify the module and code area where the
+     failure occurred.
+    :vartype reference_code: str
+    :ivar details_uri: A URI which points to more details about the context of the error.
+    :vartype details_uri: str
+    :ivar target: The target of the error (e.g., the name of the property in error).
+    :vartype target: str
+    :ivar details: The related errors that occurred during the request.
+    :vartype details: list[~azure.mgmt.machinelearningservices.models.RootError]
+    :ivar inner_error: A nested structure of errors.
+    :vartype inner_error: ~azure.mgmt.machinelearningservices.models.InnerErrorResponse
+    :ivar additional_info: The error additional info.
+    :vartype additional_info: list[~azure.mgmt.machinelearningservices.models.ErrorAdditionalInfo]
+    """
+
+    _attribute_map = {
+        'code': {'key': 'code', 'type': 'str'},
+        'severity': {'key': 'severity', 'type': 'int'},
+        'message': {'key': 'message', 'type': 'str'},
+        'message_format': {'key': 'messageFormat', 'type': 'str'},
+        'message_parameters': {'key': 'messageParameters', 'type': '{str}'},
+        'reference_code': {'key': 'referenceCode', 'type': 'str'},
+        'details_uri': {'key': 'detailsUri', 'type': 'str'},
+        'target': {'key': 'target', 'type': 'str'},
+        'details': {'key': 'details', 'type': '[RootError]'},
+        'inner_error': {'key': 'innerError', 'type': 'InnerErrorResponse'},
+        'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        code: Optional[str] = None,
+        severity: Optional[int] = None,
+        message: Optional[str] = None,
+        message_format: Optional[str] = None,
+        message_parameters: Optional[Dict[str, str]] = None,
+        reference_code: Optional[str] = None,
+        details_uri: Optional[str] = None,
+        target: Optional[str] = None,
+        details: Optional[List["RootError"]] = None,
+        inner_error: Optional["InnerErrorResponse"] = None,
+        additional_info: Optional[List["ErrorAdditionalInfo"]] = None,
+        **kwargs
+    ):
+        """
+        :keyword code: The service-defined error code. Supported error codes: ServiceError, UserError,
+         ValidationError, AzureStorageError, TransientError, RequestThrottled.
+        :paramtype code: str
+        :keyword severity: The severity of the error.
+        :paramtype severity: int
+        :keyword message: A human-readable representation of the error.
+        :paramtype message: str
+        :keyword message_format: An unformatted version of the message with no variable substitution.
+        :paramtype message_format: str
+        :keyword message_parameters: Value substitutions corresponding to the contents of
+         MessageFormat.
+        :paramtype message_parameters: dict[str, str]
+        :keyword reference_code: This code can optionally be set by the system generating the error.
+         It should be used to classify the problem and identify the module and code area where the
+         failure occurred.
+        :paramtype reference_code: str
+        :keyword details_uri: A URI which points to more details about the context of the error.
+        :paramtype details_uri: str
+        :keyword target: The target of the error (e.g., the name of the property in error).
+        :paramtype target: str
+        :keyword details: The related errors that occurred during the request.
+        :paramtype details: list[~azure.mgmt.machinelearningservices.models.RootError]
+        :keyword inner_error: A nested structure of errors.
+        :paramtype inner_error: ~azure.mgmt.machinelearningservices.models.InnerErrorResponse
+        :keyword additional_info: The error additional info.
+        :paramtype additional_info:
+         list[~azure.mgmt.machinelearningservices.models.ErrorAdditionalInfo]
+        """
+        super(RootError, self).__init__(**kwargs)
+        self.code = code
+        self.severity = severity
+        self.message = message
+        self.message_format = message_format
+        self.message_parameters = message_parameters
+        self.reference_code = reference_code
+        self.details_uri = details_uri
+        self.target = target
+        self.details = details
+        self.inner_error = inner_error
+        self.additional_info = additional_info
+
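+# Illustrative sketch only: RootError nests through ``details`` (a list of
+# further RootError instances) and ``inner_error``, so client code can flatten
+# it recursively, e.g.:
+#
+#     def flatten_errors(error):
+#         yield error.code, error.message
+#         for child in error.details or []:
+#             yield from flatten_errors(child)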
+
+class Run(msrest.serialization.Model):
+    """The definition of a Run.
+
+    :ivar run_number:
+    :vartype run_number: int
+    :ivar root_run_id:
+    :vartype root_run_id: str
+    :ivar created_utc: The time the run was created in UTC.
+    :vartype created_utc: ~datetime.datetime
+    :ivar created_by:
+    :vartype created_by: ~azure.mgmt.machinelearningservices.models.User
+    :ivar user_id: The Id of the user that created the run.
+    :vartype user_id: str
+    :ivar token: A token used for authenticating a run.
+    :vartype token: str
+    :ivar token_expiry_time_utc: The Token expiration time in UTC.
+    :vartype token_expiry_time_utc: ~datetime.datetime
+    :ivar error: The error response.
+    :vartype error: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+    :ivar warnings: A list of warnings that occurred during the run.
+    :vartype warnings: list[~azure.mgmt.machinelearningservices.models.RunDetailsWarning]
+    :ivar revision:
+    :vartype revision: long
+    :ivar status_revision:
+    :vartype status_revision: long
+    :ivar run_uuid: A system generated Id for the run.
+    :vartype run_uuid: str
+    :ivar parent_run_uuid: A system generated Id for the run's parent.
+    :vartype parent_run_uuid: str
+    :ivar root_run_uuid: A system generated Id for the root of the run's hierarchy.
+    :vartype root_run_uuid: str
+    :ivar has_virtual_parent: Indicates if this is a child of a virtual run.
+    :vartype has_virtual_parent: bool
+    :ivar last_start_time_utc: The last timestamp when a run transitioned from paused to running.
+     Initialized when StartTimeUtc is first set.
+    :vartype last_start_time_utc: ~datetime.datetime
+    :ivar current_compute_time: The cumulative time spent in an active status for an active run.
+    :vartype current_compute_time: str
+    :ivar compute_duration: The cumulative time spent in an active status for a terminal run.
+    :vartype compute_duration: str
+    :ivar effective_start_time_utc: A relative start time set as LastStartTimeUtc - ComputeTime for
+     active runs. This allows sorting active runs on how long they have been active, since an actual
+     active duration cannot be frequently updated.
+    :vartype effective_start_time_utc: ~datetime.datetime
+    :ivar last_modified_by:
+    :vartype last_modified_by: ~azure.mgmt.machinelearningservices.models.User
+    :ivar last_modified_utc: The time the run was last modified in UTC.
+    :vartype last_modified_utc: ~datetime.datetime
+    :ivar duration: The total duration of a run.
+    :vartype duration: str
+    :ivar cancelation_reason: The cancelation reason if the run was canceled.
+    :vartype cancelation_reason: str
+    :ivar run_id: The identifier for the run. Run IDs must be less than 256 characters and contain
+     only alphanumeric characters with dashes and underscores.
+    :vartype run_id: str
+    :ivar parent_run_id: The parent of the run if the run is hierarchical; otherwise, Null.
+    :vartype parent_run_id: str
+    :ivar experiment_id: The Id of the experiment that created this run.
+    :vartype experiment_id: str
+    :ivar status: The status of the run. The Status string value maps to the RunStatus Enum.
+    :vartype status: str
+    :ivar start_time_utc: The start time of the run in UTC.
+    :vartype start_time_utc: ~datetime.datetime
+    :ivar end_time_utc: The end time of the run in UTC.
+    :vartype end_time_utc: ~datetime.datetime
+    :ivar options:
+    :vartype options: ~azure.mgmt.machinelearningservices.models.RunOptions
+    :ivar is_virtual: A virtual run can set an active child run that will override the virtual run
+     status and properties.
+    :vartype is_virtual: bool
+    :ivar display_name:
+    :vartype display_name: str
+    :ivar name:
+    :vartype name: str
+    :ivar data_container_id:
+    :vartype data_container_id: str
+    :ivar description:
+    :vartype description: str
+    :ivar hidden:
+    :vartype hidden: bool
+    :ivar run_type:
+    :vartype run_type: str
+    :ivar run_type_v2:
+    :vartype run_type_v2: ~azure.mgmt.machinelearningservices.models.RunTypeV2
+    :ivar properties: Dictionary of :code:`<string>`.
+    :vartype properties: dict[str, str]
+    :ivar parameters: Dictionary of :code:`<any>`.
+    :vartype parameters: dict[str, any]
+    :ivar action_uris: Dictionary of :code:`<string>`.
+    :vartype action_uris: dict[str, str]
+    :ivar script_name:
+    :vartype script_name: str
+    :ivar target:
+    :vartype target: str
+    :ivar unique_child_run_compute_targets:
+    :vartype unique_child_run_compute_targets: list[str]
+    :ivar tags: A set of tags. Dictionary of :code:`<string>`.
+    :vartype tags: dict[str, str]
+    :ivar settings: Dictionary of :code:`<string>`.
+    :vartype settings: dict[str, str]
+    :ivar services: Dictionary of :code:`<EndpointSetting>`.
+    :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.EndpointSetting]
+    :ivar input_datasets:
+    :vartype input_datasets: list[~azure.mgmt.machinelearningservices.models.DatasetLineage]
+    :ivar output_datasets:
+    :vartype output_datasets: list[~azure.mgmt.machinelearningservices.models.OutputDatasetLineage]
+    :ivar run_definition: Anything.
+    :vartype run_definition: any
+    :ivar job_specification: Anything.
+    :vartype job_specification: any
+    :ivar primary_metric_name:
+    :vartype primary_metric_name: str
+    :ivar created_from:
+    :vartype created_from: ~azure.mgmt.machinelearningservices.models.CreatedFrom
+    :ivar cancel_uri:
+    :vartype cancel_uri: str
+    :ivar complete_uri:
+    :vartype complete_uri: str
+    :ivar diagnostics_uri:
+    :vartype diagnostics_uri: str
+    :ivar compute_request:
+    :vartype compute_request: ~azure.mgmt.machinelearningservices.models.ComputeRequest
+    :ivar compute:
+    :vartype compute: ~azure.mgmt.machinelearningservices.models.Compute
+    :ivar retain_for_lifetime_of_workspace:
+    :vartype retain_for_lifetime_of_workspace: bool
+    :ivar queueing_info:
+    :vartype queueing_info: ~azure.mgmt.machinelearningservices.models.QueueingInfo
+    :ivar active_child_run_id: The RunId of the active child on a virtual run.
+    :vartype active_child_run_id: str
+    :ivar inputs: Dictionary of :code:`<TypedAssetReference>`.
+    :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+    :ivar outputs: Dictionary of :code:`<TypedAssetReference>`.
+    :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+    """
+
+    _validation = {
+        'unique_child_run_compute_targets': {'unique': True},
+        'input_datasets': {'unique': True},
+        'output_datasets': {'unique': True},
+    }
+
+    _attribute_map = {
+        'run_number': {'key': 'runNumber', 'type': 'int'},
+        'root_run_id': {'key': 'rootRunId', 'type': 'str'},
+        'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'},
+        'created_by': {'key': 'createdBy', 'type': 'User'},
+        'user_id': {'key': 'userId', 'type': 'str'},
+        'token': {'key': 'token', 'type': 'str'},
+        'token_expiry_time_utc': {'key': 'tokenExpiryTimeUtc', 'type': 'iso-8601'},
+        'error': {'key': 'error', 'type': 'ErrorResponse'},
+        'warnings': {'key': 'warnings', 'type': '[RunDetailsWarning]'},
+        'revision': {'key': 'revision', 'type': 'long'},
+        'status_revision': {'key': 'statusRevision', 'type': 'long'},
+        'run_uuid': {'key': 'runUuid', 'type': 'str'},
+        'parent_run_uuid': {'key': 'parentRunUuid', 'type': 'str'},
+        'root_run_uuid': {'key': 'rootRunUuid', 'type': 'str'},
+        'has_virtual_parent': {'key': 'hasVirtualParent', 'type': 'bool'},
+        'last_start_time_utc': {'key': 'lastStartTimeUtc', 'type': 'iso-8601'},
+        'current_compute_time': {'key': 'currentComputeTime', 'type': 'str'},
+        'compute_duration': {'key': 'computeDuration', 'type': 'str'},
+        'effective_start_time_utc': {'key': 'effectiveStartTimeUtc', 'type': 'iso-8601'},
+        'last_modified_by': {'key': 'lastModifiedBy', 'type': 'User'},
+        'last_modified_utc': {'key': 'lastModifiedUtc', 'type': 'iso-8601'},
+        'duration': {'key': 'duration', 'type': 'str'},
+        'cancelation_reason': {'key': 'cancelationReason', 'type': 'str'},
+        'run_id': {'key': 'runId', 'type': 'str'},
+        'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
+        'experiment_id': {'key': 'experimentId', 'type': 'str'},
+        'status': {'key': 'status', 'type': 'str'},
+        'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
+        'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
+        'options': {'key': 'options', 'type': 'RunOptions'},
+        'is_virtual': {'key': 'isVirtual', 'type': 'bool'},
+        'display_name': {'key': 'displayName', 'type': 'str'},
+        'name': {'key': 'name', 'type': 'str'},
+        'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'hidden': {'key': 'hidden', 'type': 'bool'},
+        'run_type': {'key': 'runType', 'type': 'str'},
+        'run_type_v2': {'key': 'runTypeV2', 'type': 'RunTypeV2'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'parameters': {'key': 'parameters', 'type': '{object}'},
+        'action_uris': {'key': 'actionUris', 'type': '{str}'},
+        'script_name': {'key': 'scriptName', 'type': 'str'},
+        'target': {'key': 'target', 'type': 'str'},
+        'unique_child_run_compute_targets': {'key': 'uniqueChildRunComputeTargets', 'type': '[str]'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'settings': {'key': 'settings', 'type': '{str}'},
+        'services': {'key': 'services', 'type': '{EndpointSetting}'},
+        'input_datasets': {'key': 'inputDatasets', 'type': '[DatasetLineage]'},
+        'output_datasets': {'key': 'outputDatasets', 'type': '[OutputDatasetLineage]'},
+        'run_definition': {'key': 'runDefinition', 'type': 'object'},
+        'job_specification': {'key': 'jobSpecification', 'type': 'object'},
+        'primary_metric_name': {'key': 'primaryMetricName', 'type': 'str'},
+        'created_from': {'key': 'createdFrom', 'type': 'CreatedFrom'},
+        'cancel_uri': {'key': 'cancelUri', 'type': 'str'},
+        'complete_uri': {'key': 'completeUri', 'type': 'str'},
+        'diagnostics_uri': {'key': 'diagnosticsUri', 'type': 'str'},
+        'compute_request': {'key': 'computeRequest', 'type': 'ComputeRequest'},
+        'compute': {'key': 'compute', 'type': 'Compute'},
+        'retain_for_lifetime_of_workspace': {'key': 'retainForLifetimeOfWorkspace', 'type': 'bool'},
+        'queueing_info': {'key': 'queueingInfo', 'type': 'QueueingInfo'},
+        'active_child_run_id': {'key': 'activeChildRunId', 'type': 'str'},
+        'inputs': {'key': 'inputs', 'type': '{TypedAssetReference}'},
+        'outputs': {'key': 'outputs', 'type': '{TypedAssetReference}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        run_number: Optional[int] = None,
+        root_run_id: Optional[str] = None,
+        created_utc: Optional[datetime.datetime] = None,
+        created_by: Optional["User"] = None,
+        user_id: Optional[str] = None,
+        token: Optional[str] = None,
+        token_expiry_time_utc: Optional[datetime.datetime] = None,
+        error: Optional["ErrorResponse"] = None,
+        warnings: Optional[List["RunDetailsWarning"]] = None,
+        revision: Optional[int] = None,
+        status_revision: Optional[int] = None,
+        run_uuid: Optional[str] = None,
+        parent_run_uuid: Optional[str] = None,
+        root_run_uuid: Optional[str] = None,
+        has_virtual_parent: Optional[bool] = None,
+        last_start_time_utc: Optional[datetime.datetime] = None,
+        current_compute_time: Optional[str] = None,
+        compute_duration: Optional[str] = None,
+        effective_start_time_utc: Optional[datetime.datetime] = None,
+        last_modified_by: Optional["User"] = None,
+        last_modified_utc: Optional[datetime.datetime] = None,
+        duration: Optional[str] = None,
+        cancelation_reason: Optional[str] = None,
+        run_id: Optional[str] = None,
+        parent_run_id: Optional[str] = None,
+        experiment_id: Optional[str] = None,
+        status: Optional[str] = None,
+        start_time_utc: Optional[datetime.datetime] = None,
+        end_time_utc: Optional[datetime.datetime] = None,
+        options: Optional["RunOptions"] = None,
+        is_virtual: Optional[bool] = None,
+        display_name: Optional[str] = None,
+        name: Optional[str] = None,
+        data_container_id: Optional[str] = None,
+        description: Optional[str] = None,
+        hidden: Optional[bool] = None,
+        run_type: Optional[str] = None,
+        run_type_v2: Optional["RunTypeV2"] = None,
+        properties: Optional[Dict[str, str]] = None,
+        parameters: Optional[Dict[str, Any]] = None,
+        action_uris: Optional[Dict[str, str]] = None,
+        script_name: Optional[str] = None,
+        target: Optional[str] = None,
+        unique_child_run_compute_targets: Optional[List[str]] = None,
+        tags: Optional[Dict[str, str]] = None,
+        settings: Optional[Dict[str, str]] = None,
+        services: Optional[Dict[str, "EndpointSetting"]] = None,
+        input_datasets: Optional[List["DatasetLineage"]] = None,
+        output_datasets: Optional[List["OutputDatasetLineage"]] = None,
+        run_definition: Optional[Any] = None,
+        job_specification: Optional[Any] = None,
+        primary_metric_name: Optional[str] = None,
+        created_from: Optional["CreatedFrom"] = None,
+        cancel_uri: Optional[str] = None,
+        complete_uri: Optional[str] = None,
+        diagnostics_uri: Optional[str] = None,
+        compute_request: Optional["ComputeRequest"] = None,
+        compute: Optional["Compute"] = None,
+        retain_for_lifetime_of_workspace: Optional[bool] = None,
+        queueing_info: Optional["QueueingInfo"] = None,
+        active_child_run_id: Optional[str] = None,
+        inputs: Optional[Dict[str, "TypedAssetReference"]] = None,
+        outputs: Optional[Dict[str, "TypedAssetReference"]] = None,
+        **kwargs
+    ):
+        """
+        :keyword run_number:
+        :paramtype run_number: int
+        :keyword root_run_id:
+        :paramtype root_run_id: str
+        :keyword created_utc: The time the run was created in UTC.
+        :paramtype created_utc: ~datetime.datetime
+        :keyword created_by:
+        :paramtype created_by: ~azure.mgmt.machinelearningservices.models.User
+        :keyword user_id: The Id of the user that created the run.
+        :paramtype user_id: str
+        :keyword token: A token used for authenticating a run.
+        :paramtype token: str
+        :keyword token_expiry_time_utc: The Token expiration time in UTC.
+        :paramtype token_expiry_time_utc: ~datetime.datetime
+        :keyword error: The error response.
+        :paramtype error: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+        :keyword warnings: A list of warnings that occurred during the run.
+        :paramtype warnings: list[~azure.mgmt.machinelearningservices.models.RunDetailsWarning]
+        :keyword revision:
+        :paramtype revision: long
+        :keyword status_revision:
+        :paramtype status_revision: long
+        :keyword run_uuid: A system generated Id for the run.
+        :paramtype run_uuid: str
+        :keyword parent_run_uuid: A system generated Id for the run's parent.
+        :paramtype parent_run_uuid: str
+        :keyword root_run_uuid: A system generated Id for the root of the run's hierarchy.
+        :paramtype root_run_uuid: str
+        :keyword has_virtual_parent: Indicates if this is a child of a virtual run.
+        :paramtype has_virtual_parent: bool
+        :keyword last_start_time_utc: The last timestamp when a run transitioned from paused to
+         running. Initialized when StartTimeUtc is first set.
+        :paramtype last_start_time_utc: ~datetime.datetime
+        :keyword current_compute_time: The cumulative time spent in an active status for an active run.
+        :paramtype current_compute_time: str
+        :keyword compute_duration: The cumulative time spent in an active status for a terminal run.
+        :paramtype compute_duration: str
+        :keyword effective_start_time_utc: A relative start time set as LastStartTimeUtc - ComputeTime
+         for active runs. This allows sorting active runs on how long they have been active, since an
+         actual active duration cannot be frequently updated.
+        :paramtype effective_start_time_utc: ~datetime.datetime
+        :keyword last_modified_by:
+        :paramtype last_modified_by: ~azure.mgmt.machinelearningservices.models.User
+        :keyword last_modified_utc: The time the run was last modified in UTC.
+        :paramtype last_modified_utc: ~datetime.datetime
+        :keyword duration: The total duration of a run.
+        :paramtype duration: str
+        :keyword cancelation_reason: The cancelation reason if the run was canceled.
+        :paramtype cancelation_reason: str
+        :keyword run_id: The identifier for the run. Run IDs must be less than 256 characters and
+         contain only alphanumeric characters with dashes and underscores.
+        :paramtype run_id: str
+        :keyword parent_run_id: The parent of the run if the run is hierarchical; otherwise, Null.
+        :paramtype parent_run_id: str
+        :keyword experiment_id: The Id of the experiment that created this run.
+        :paramtype experiment_id: str
+        :keyword status: The status of the run. The Status string value maps to the RunStatus Enum.
+        :paramtype status: str
+        :keyword start_time_utc: The start time of the run in UTC.
+        :paramtype start_time_utc: ~datetime.datetime
+        :keyword end_time_utc: The end time of the run in UTC.
+        :paramtype end_time_utc: ~datetime.datetime
+        :keyword options:
+        :paramtype options: ~azure.mgmt.machinelearningservices.models.RunOptions
+        :keyword is_virtual: A virtual run can set an active child run that will override the virtual
+         run status and properties.
+        :paramtype is_virtual: bool
+        :keyword display_name:
+        :paramtype display_name: str
+        :keyword name:
+        :paramtype name: str
+        :keyword data_container_id:
+        :paramtype data_container_id: str
+        :keyword description:
+        :paramtype description: str
+        :keyword hidden:
+        :paramtype hidden: bool
+        :keyword run_type:
+        :paramtype run_type: str
+        :keyword run_type_v2:
+        :paramtype run_type_v2: ~azure.mgmt.machinelearningservices.models.RunTypeV2
+        :keyword properties: Dictionary of :code:`<string>`.
+        :paramtype properties: dict[str, str]
+        :keyword parameters: Dictionary of :code:`<any>`.
+        :paramtype parameters: dict[str, any]
+        :keyword action_uris: Dictionary of :code:`<string>`.
+        :paramtype action_uris: dict[str, str]
+        :keyword script_name:
+        :paramtype script_name: str
+        :keyword target:
+        :paramtype target: str
+        :keyword unique_child_run_compute_targets:
+        :paramtype unique_child_run_compute_targets: list[str]
+        :keyword tags: A set of tags. Dictionary of :code:`<string>`.
+        :paramtype tags: dict[str, str]
+        :keyword settings: Dictionary of :code:`<string>`.
+        :paramtype settings: dict[str, str]
+        :keyword services: Dictionary of :code:`<EndpointSetting>`.
+        :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.EndpointSetting]
+        :keyword input_datasets:
+        :paramtype input_datasets: list[~azure.mgmt.machinelearningservices.models.DatasetLineage]
+        :keyword output_datasets:
+        :paramtype output_datasets:
+         list[~azure.mgmt.machinelearningservices.models.OutputDatasetLineage]
+        :keyword run_definition: Anything.
+        :paramtype run_definition: any
+        :keyword job_specification: Anything.
+        :paramtype job_specification: any
+        :keyword primary_metric_name:
+        :paramtype primary_metric_name: str
+        :keyword created_from:
+        :paramtype created_from: ~azure.mgmt.machinelearningservices.models.CreatedFrom
+        :keyword cancel_uri:
+        :paramtype cancel_uri: str
+        :keyword complete_uri:
+        :paramtype complete_uri: str
+        :keyword diagnostics_uri:
+        :paramtype diagnostics_uri: str
+        :keyword compute_request:
+        :paramtype compute_request: ~azure.mgmt.machinelearningservices.models.ComputeRequest
+        :keyword compute:
+        :paramtype compute: ~azure.mgmt.machinelearningservices.models.Compute
+        :keyword retain_for_lifetime_of_workspace:
+        :paramtype retain_for_lifetime_of_workspace: bool
+        :keyword queueing_info:
+        :paramtype queueing_info: ~azure.mgmt.machinelearningservices.models.QueueingInfo
+        :keyword active_child_run_id: The RunId of the active child on a virtual run.
+        :paramtype active_child_run_id: str
+        :keyword inputs: Dictionary of :code:`<TypedAssetReference>`.
+        :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+        :keyword outputs: Dictionary of :code:`<TypedAssetReference>`.
+        :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+        """
+        super(Run, self).__init__(**kwargs)
+        self.run_number = run_number
+        self.root_run_id = root_run_id
+        self.created_utc = created_utc
+        self.created_by = created_by
+        self.user_id = user_id
+        self.token = token
+        self.token_expiry_time_utc = token_expiry_time_utc
+        self.error = error
+        self.warnings = warnings
+        self.revision = revision
+        self.status_revision = status_revision
+        self.run_uuid = run_uuid
+        self.parent_run_uuid = parent_run_uuid
+        self.root_run_uuid = root_run_uuid
+        self.has_virtual_parent = has_virtual_parent
+        self.last_start_time_utc = last_start_time_utc
+        self.current_compute_time = current_compute_time
+        self.compute_duration = compute_duration
+        self.effective_start_time_utc = effective_start_time_utc
+        self.last_modified_by = last_modified_by
+        self.last_modified_utc = last_modified_utc
+        self.duration = duration
+        self.cancelation_reason = cancelation_reason
+        self.run_id = run_id
+        self.parent_run_id = parent_run_id
+        self.experiment_id = experiment_id
+        self.status = status
+        self.start_time_utc = start_time_utc
+        self.end_time_utc = end_time_utc
+        self.options = options
+        self.is_virtual = is_virtual
+        self.display_name = display_name
+        self.name = name
+        self.data_container_id = data_container_id
+        self.description = description
+        self.hidden = hidden
+        self.run_type = run_type
+        self.run_type_v2 = run_type_v2
+        self.properties = properties
+        self.parameters = parameters
+        self.action_uris = action_uris
+        self.script_name = script_name
+        self.target = target
+        self.unique_child_run_compute_targets = unique_child_run_compute_targets
+        self.tags = tags
+        self.settings = settings
+        self.services = services
+        self.input_datasets = input_datasets
+        self.output_datasets = output_datasets
+        self.run_definition = run_definition
+        self.job_specification = job_specification
+        self.primary_metric_name = primary_metric_name
+        self.created_from = created_from
+        self.cancel_uri = cancel_uri
+        self.complete_uri = complete_uri
+        self.diagnostics_uri = diagnostics_uri
+        self.compute_request = compute_request
+        self.compute = compute
+        self.retain_for_lifetime_of_workspace = retain_for_lifetime_of_workspace
+        self.queueing_info = queueing_info
+        self.active_child_run_id = active_child_run_id
+        self.inputs = inputs
+        self.outputs = outputs
+
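+# Illustrative usage only: Run inherits msrest.serialization.Model, so an
+# instance can be converted to its REST wire form with ``serialize()``; the
+# field values below are placeholders.
+#
+#     run = Run(run_id="example_run_001", display_name="example", status="Completed")
+#     payload = run.serialize()  # dict keyed by the REST names (runId, displayName, ...)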
+
+class RunDetails(msrest.serialization.Model):
+    """The details of the run.
+
+    :ivar run_id: The identifier for the run.
+    :vartype run_id: str
+    :ivar run_uuid: A system generated Id for the run.
+    :vartype run_uuid: str
+    :ivar parent_run_uuid: A system generated Id for the run's parent.
+    :vartype parent_run_uuid: str
+    :ivar root_run_uuid: A system generated Id for the root of the run's hierarchy.
+    :vartype root_run_uuid: str
+    :ivar target: The name of the compute target where the run is executed.
+    :vartype target: str
+    :ivar status: The status of the run. The Status string value maps to the RunStatus Enum.
+    :vartype status: str
+    :ivar parent_run_id: The parent of the run if the run is hierarchical.
+    :vartype parent_run_id: str
+    :ivar created_time_utc: The creation time of the run in UTC.
+    :vartype created_time_utc: ~datetime.datetime
+    :ivar start_time_utc: The start time of the run in UTC.
+    :vartype start_time_utc: ~datetime.datetime
+    :ivar end_time_utc: The end time of the run in UTC.
+    :vartype end_time_utc: ~datetime.datetime
+    :ivar error: The error response.
+    :vartype error: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+    :ivar warnings: A list of warnings that occurred during the run.
+    :vartype warnings: list[~azure.mgmt.machinelearningservices.models.RunDetailsWarning]
+    :ivar tags: A set of tags. The tag dictionary for the run. Tags are mutable.
+    :vartype tags: dict[str, str]
+    :ivar properties: The properties dictionary for the run. Properties are immutable.
+    :vartype properties: dict[str, str]
+    :ivar parameters: The parameters dictionary for the run. Parameters are immutable.
+    :vartype parameters: dict[str, any]
+    :ivar services: The interactive run services for a run. Services are mutable.
+    :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.EndpointSetting]
+    :ivar input_datasets: A list of datasets used as input to the run.
+    :vartype input_datasets: list[~azure.mgmt.machinelearningservices.models.DatasetLineage]
+    :ivar output_datasets: A list of datasets used as output of the run.
+    :vartype output_datasets: list[~azure.mgmt.machinelearningservices.models.OutputDatasetLineage]
+    :ivar run_definition: The run definition specification.
+    :vartype run_definition: any
+    :ivar log_files: Dictionary of :code:`<string>`.
+    :vartype log_files: dict[str, str]
+    :ivar job_cost:
+    :vartype job_cost: ~azure.mgmt.machinelearningservices.models.JobCost
+    :ivar revision:
+    :vartype revision: long
+    :ivar run_type_v2:
+    :vartype run_type_v2: ~azure.mgmt.machinelearningservices.models.RunTypeV2
+    :ivar settings: The run settings.
+    :vartype settings: dict[str, str]
+    :ivar compute_request:
+    :vartype compute_request: ~azure.mgmt.machinelearningservices.models.ComputeRequest
+    :ivar compute:
+    :vartype compute: ~azure.mgmt.machinelearningservices.models.Compute
+    :ivar created_by:
+    :vartype created_by: ~azure.mgmt.machinelearningservices.models.User
+    :ivar compute_duration: Time spent in an active state for terminal runs.
+    :vartype compute_duration: str
+    :ivar effective_start_time_utc: Relative start time of active runs for ordering and computing
+     active compute duration.
+     Compute duration of an active run is now() - EffectiveStartTimeUtc.
+    :vartype effective_start_time_utc: ~datetime.datetime
+    :ivar run_number:
+    :vartype run_number: int
+    :ivar root_run_id:
+    :vartype root_run_id: str
+    :ivar user_id: The Id of the user that created the run.
+    :vartype user_id: str
+    :ivar status_revision:
+    :vartype status_revision: long
+    :ivar has_virtual_parent: Indicates if this is a child of a virtual run.
+    :vartype has_virtual_parent: bool
+    :ivar current_compute_time: The cumulative time spent in an active status for an active run.
+    :vartype current_compute_time: str
+    :ivar last_start_time_utc: The last timestamp when a run transitioned from paused to running.
+     Initialized when StartTimeUtc is first set.
+    :vartype last_start_time_utc: ~datetime.datetime
+    :ivar last_modified_by:
+    :vartype last_modified_by: ~azure.mgmt.machinelearningservices.models.User
+    :ivar last_modified_utc: The time the run was last modified in UTC.
+    :vartype last_modified_utc: ~datetime.datetime
+    :ivar duration: The total duration of a run.
+    :vartype duration: str
+    :ivar inputs: The inputs for the run.
+    :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+    :ivar outputs: The outputs for the run.
+    :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+    """
+
+    _validation = {
+        'input_datasets': {'unique': True},
+        'output_datasets': {'unique': True},
+    }
+
+    _attribute_map = {
+        'run_id': {'key': 'runId', 'type': 'str'},
+        'run_uuid': {'key': 'runUuid', 'type': 'str'},
+        'parent_run_uuid': {'key': 'parentRunUuid', 'type': 'str'},
+        'root_run_uuid': {'key': 'rootRunUuid', 'type': 'str'},
+        'target': {'key': 'target', 'type': 'str'},
+        'status': {'key': 'status', 'type': 'str'},
+        'parent_run_id': {'key': 'parentRunId', 'type': 'str'},
+        'created_time_utc': {'key': 'createdTimeUtc', 'type': 'iso-8601'},
+        'start_time_utc': {'key': 'startTimeUtc', 'type': 'iso-8601'},
+        'end_time_utc': {'key': 'endTimeUtc', 'type': 'iso-8601'},
+        'error': {'key': 'error', 'type': 'ErrorResponse'},
+        'warnings': {'key': 'warnings', 'type': '[RunDetailsWarning]'},
+        'tags': {'key': 'tags', 'type': '{str}'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+        'parameters': {'key': 'parameters', 'type': '{object}'},
+        'services': {'key': 'services', 'type': '{EndpointSetting}'},
+        'input_datasets': {'key': 'inputDatasets', 'type': '[DatasetLineage]'},
+        'output_datasets': {'key': 'outputDatasets', 'type': '[OutputDatasetLineage]'},
+        'run_definition': {'key': 'runDefinition', 'type': 'object'},
+        'log_files': {'key': 'logFiles', 'type': '{str}'},
+        'job_cost': {'key': 'jobCost', 'type': 'JobCost'},
+        'revision': {'key': 'revision', 'type': 'long'},
+        'run_type_v2': {'key': 'runTypeV2', 'type': 'RunTypeV2'},
+        'settings': {'key': 'settings', 'type': '{str}'},
+        'compute_request': {'key': 'computeRequest', 'type': 'ComputeRequest'},
+        'compute': {'key': 'compute', 'type': 'Compute'},
+        'created_by': {'key': 'createdBy', 'type': 'User'},
+        'compute_duration': {'key': 'computeDuration', 'type': 'str'},
+        'effective_start_time_utc': {'key': 'effectiveStartTimeUtc', 'type': 'iso-8601'},
+        'run_number': {'key': 'runNumber', 'type': 'int'},
+        'root_run_id': {'key': 'rootRunId', 'type': 'str'},
+        'user_id': {'key': 'userId', 'type': 'str'},
+        'status_revision': {'key': 'statusRevision', 'type': 'long'},
+        'has_virtual_parent': {'key': 'hasVirtualParent', 'type': 'bool'},
+        'current_compute_time': {'key': 'currentComputeTime', 'type': 'str'},
+        'last_start_time_utc': {'key': 'lastStartTimeUtc', 'type': 'iso-8601'},
+        'last_modified_by': {'key': 'lastModifiedBy', 'type': 'User'},
+        'last_modified_utc': {'key': 'lastModifiedUtc', 'type': 'iso-8601'},
+        'duration': {'key': 'duration', 'type': 'str'},
+        'inputs': {'key': 'inputs', 'type': '{TypedAssetReference}'},
+        'outputs': {'key': 'outputs', 'type': '{TypedAssetReference}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        run_id: Optional[str] = None,
+        run_uuid: Optional[str] = None,
+        parent_run_uuid: Optional[str] = None,
+        root_run_uuid: Optional[str] = None,
+        target: Optional[str] = None,
+        status: Optional[str] = None,
+        parent_run_id: Optional[str] = None,
+        created_time_utc: Optional[datetime.datetime] = None,
+        start_time_utc: Optional[datetime.datetime] = None,
+        end_time_utc: Optional[datetime.datetime] = None,
+        error: Optional["ErrorResponse"] = None,
+        warnings: Optional[List["RunDetailsWarning"]] = None,
+        tags: Optional[Dict[str, str]] = None,
+        properties: Optional[Dict[str, str]] = None,
+        parameters: Optional[Dict[str, Any]] = None,
+        services: Optional[Dict[str, "EndpointSetting"]] = None,
+        input_datasets: Optional[List["DatasetLineage"]] = None,
+        output_datasets: Optional[List["OutputDatasetLineage"]] = None,
+        run_definition: Optional[Any] = None,
+        log_files: Optional[Dict[str, str]] = None,
+        job_cost: Optional["JobCost"] = None,
+        revision: Optional[int] = None,
+        run_type_v2: Optional["RunTypeV2"] = None,
+        settings: Optional[Dict[str, str]] = None,
+        compute_request: Optional["ComputeRequest"] = None,
+        compute: Optional["Compute"] = None,
+        created_by: Optional["User"] = None,
+        compute_duration: Optional[str] = None,
+        effective_start_time_utc: Optional[datetime.datetime] = None,
+        run_number: Optional[int] = None,
+        root_run_id: Optional[str] = None,
+        user_id: Optional[str] = None,
+        status_revision: Optional[int] = None,
+        has_virtual_parent: Optional[bool] = None,
+        current_compute_time: Optional[str] = None,
+        last_start_time_utc: Optional[datetime.datetime] = None,
+        last_modified_by: Optional["User"] = None,
+        last_modified_utc: Optional[datetime.datetime] = None,
+        duration: Optional[str] = None,
+        inputs: Optional[Dict[str, "TypedAssetReference"]] = None,
+        outputs: Optional[Dict[str, "TypedAssetReference"]] = None,
+        **kwargs
+    ):
+        """
+        :keyword run_id: The identifier for the run.
+        :paramtype run_id: str
+        :keyword run_uuid: A system generated Id for the run.
+        :paramtype run_uuid: str
+        :keyword parent_run_uuid: A system generated Id for the run's parent.
+        :paramtype parent_run_uuid: str
+        :keyword root_run_uuid: A system generated Id for the root of the run's hierarchy.
+        :paramtype root_run_uuid: str
+        :keyword target: The name of the compute target where the run is executed.
+        :paramtype target: str
+        :keyword status: The status of the run. The Status string value maps to the RunStatus Enum.
+        :paramtype status: str
+        :keyword parent_run_id: The parent of the run if the run is hierarchical.
+        :paramtype parent_run_id: str
+        :keyword created_time_utc: The creation time of the run in UTC.
+        :paramtype created_time_utc: ~datetime.datetime
+        :keyword start_time_utc: The start time of the run in UTC.
+        :paramtype start_time_utc: ~datetime.datetime
+        :keyword end_time_utc: The end time of the run in UTC.
+        :paramtype end_time_utc: ~datetime.datetime
+        :keyword error: The error response.
+        :paramtype error: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+        :keyword warnings: A list of warnings that occurred during the run.
+        :paramtype warnings: list[~azure.mgmt.machinelearningservices.models.RunDetailsWarning]
+        :keyword tags: A set of tags. The tag dictionary for the run. Tags are mutable.
+        :paramtype tags: dict[str, str]
+        :keyword properties: The properties dictionary for the run. Properties are immutable.
+        :paramtype properties: dict[str, str]
+        :keyword parameters: The parameters dictionary for the run. Parameters are immutable.
+        :paramtype parameters: dict[str, any]
+        :keyword services: The interactive run services for a run. Services are mutable.
+        :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.EndpointSetting]
+        :keyword input_datasets: A list of datasets used as input to the run.
+        :paramtype input_datasets: list[~azure.mgmt.machinelearningservices.models.DatasetLineage]
+        :keyword output_datasets: A list of datasets produced as output by the run.
+        :paramtype output_datasets:
+         list[~azure.mgmt.machinelearningservices.models.OutputDatasetLineage]
+        :keyword run_definition: The run definition specification.
+        :paramtype run_definition: any
+        :keyword log_files: Dictionary of :code:`<string>`.
+        :paramtype log_files: dict[str, str]
+        :keyword job_cost:
+        :paramtype job_cost: ~azure.mgmt.machinelearningservices.models.JobCost
+        :keyword revision:
+        :paramtype revision: long
+        :keyword run_type_v2:
+        :paramtype run_type_v2: ~azure.mgmt.machinelearningservices.models.RunTypeV2
+        :keyword settings: The run settings.
+        :paramtype settings: dict[str, str]
+        :keyword compute_request:
+        :paramtype compute_request: ~azure.mgmt.machinelearningservices.models.ComputeRequest
+        :keyword compute:
+        :paramtype compute: ~azure.mgmt.machinelearningservices.models.Compute
+        :keyword created_by:
+        :paramtype created_by: ~azure.mgmt.machinelearningservices.models.User
+        :keyword compute_duration: Time spent in an active state for terminal runs.
+        :paramtype compute_duration: str
+        :keyword effective_start_time_utc: Relative start time of active runs for ordering and
+         computing active compute duration.
+         Compute duration of an active run is now() - EffectiveStartTimeUtc.
+        :paramtype effective_start_time_utc: ~datetime.datetime
+        :keyword run_number: The run number.
+        :paramtype run_number: int
+        :keyword root_run_id:
+        :paramtype root_run_id: str
+        :keyword user_id: The Id of the user that created the run.
+        :paramtype user_id: str
+        :keyword status_revision:
+        :paramtype status_revision: long
+        :keyword has_virtual_parent: Indicates if this is a child of a virtual run.
+        :paramtype has_virtual_parent: bool
+        :keyword current_compute_time: The cumulative time spent in an active status for an active run.
+        :paramtype current_compute_time: str
+        :keyword last_start_time_utc: The last timestamp when a run transitioned from paused to
+         running. Initialized when StartTimeUtc is first set.
+        :paramtype last_start_time_utc: ~datetime.datetime
+        :keyword last_modified_by:
+        :paramtype last_modified_by: ~azure.mgmt.machinelearningservices.models.User
+        :keyword last_modified_utc: The time the run was last modified in UTC.
+        :paramtype last_modified_utc: ~datetime.datetime
+        :keyword duration: The total duration of a run.
+        :paramtype duration: str
+        :keyword inputs: The inputs for the run.
+        :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+        :keyword outputs: The outputs for the run.
+        :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.TypedAssetReference]
+        """
+        super(RunDetails, self).__init__(**kwargs)
+        self.run_id = run_id
+        self.run_uuid = run_uuid
+        self.parent_run_uuid = parent_run_uuid
+        self.root_run_uuid = root_run_uuid
+        self.target = target
+        self.status = status
+        self.parent_run_id = parent_run_id
+        self.created_time_utc = created_time_utc
+        self.start_time_utc = start_time_utc
+        self.end_time_utc = end_time_utc
+        self.error = error
+        self.warnings = warnings
+        self.tags = tags
+        self.properties = properties
+        self.parameters = parameters
+        self.services = services
+        self.input_datasets = input_datasets
+        self.output_datasets = output_datasets
+        self.run_definition = run_definition
+        self.log_files = log_files
+        self.job_cost = job_cost
+        self.revision = revision
+        self.run_type_v2 = run_type_v2
+        self.settings = settings
+        self.compute_request = compute_request
+        self.compute = compute
+        self.created_by = created_by
+        self.compute_duration = compute_duration
+        self.effective_start_time_utc = effective_start_time_utc
+        self.run_number = run_number
+        self.root_run_id = root_run_id
+        self.user_id = user_id
+        self.status_revision = status_revision
+        self.has_virtual_parent = has_virtual_parent
+        self.current_compute_time = current_compute_time
+        self.last_start_time_utc = last_start_time_utc
+        self.last_modified_by = last_modified_by
+        self.last_modified_utc = last_modified_utc
+        self.duration = duration
+        self.inputs = inputs
+        self.outputs = outputs
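+
+
+# Illustrative sketch only (not part of the generated client): assembling a
+# minimal RunDetails payload locally. Every value below is hypothetical.
+def _example_run_details():
+    return RunDetails(
+        run_id="example-run-id",                  # hypothetical run identifier
+        status="Completed",                       # maps to the RunStatus enum
+        target="cpu-cluster",                     # hypothetical compute target name
+        tags={"stage": "smoke-test"},             # tags are mutable
+        properties={"source": "example"},         # properties are immutable
+        inputs={
+            "training_data": TypedAssetReference(
+                asset_id="azureml://example/asset",  # hypothetical asset id
+                type="uri_folder",
+            )
+        },
+    )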
+
+
+class RunDetailsWarning(msrest.serialization.Model):
+    """RunDetailsWarning.
+
+    :ivar source:
+    :vartype source: str
+    :ivar message:
+    :vartype message: str
+    """
+
+    _attribute_map = {
+        'source': {'key': 'source', 'type': 'str'},
+        'message': {'key': 'message', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        source: Optional[str] = None,
+        message: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword source:
+        :paramtype source: str
+        :keyword message:
+        :paramtype message: str
+        """
+        super(RunDetailsWarning, self).__init__(**kwargs)
+        self.source = source
+        self.message = message
+
+
+class RunMetric(msrest.serialization.Model):
+    """RunMetric.
+
+    :ivar run_id:
+    :vartype run_id: str
+    :ivar metric_id:
+    :vartype metric_id: str
+    :ivar data_container_id:
+    :vartype data_container_id: str
+    :ivar metric_type:
+    :vartype metric_type: str
+    :ivar created_utc:
+    :vartype created_utc: ~datetime.datetime
+    :ivar name:
+    :vartype name: str
+    :ivar description:
+    :vartype description: str
+    :ivar label:
+    :vartype label: str
+    :ivar num_cells:
+    :vartype num_cells: int
+    :ivar data_location:
+    :vartype data_location: str
+    :ivar cells:
+    :vartype cells: list[dict[str, any]]
+    :ivar schema:
+    :vartype schema: ~azure.mgmt.machinelearningservices.models.MetricSchema
+    """
+
+    _attribute_map = {
+        'run_id': {'key': 'runId', 'type': 'str'},
+        'metric_id': {'key': 'metricId', 'type': 'str'},
+        'data_container_id': {'key': 'dataContainerId', 'type': 'str'},
+        'metric_type': {'key': 'metricType', 'type': 'str'},
+        'created_utc': {'key': 'createdUtc', 'type': 'iso-8601'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'label': {'key': 'label', 'type': 'str'},
+        'num_cells': {'key': 'numCells', 'type': 'int'},
+        'data_location': {'key': 'dataLocation', 'type': 'str'},
+        'cells': {'key': 'cells', 'type': '[{object}]'},
+        'schema': {'key': 'schema', 'type': 'MetricSchema'},
+    }
+
+    def __init__(
+        self,
+        *,
+        run_id: Optional[str] = None,
+        metric_id: Optional[str] = None,
+        data_container_id: Optional[str] = None,
+        metric_type: Optional[str] = None,
+        created_utc: Optional[datetime.datetime] = None,
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        label: Optional[str] = None,
+        num_cells: Optional[int] = None,
+        data_location: Optional[str] = None,
+        cells: Optional[List[Dict[str, Any]]] = None,
+        schema: Optional["MetricSchema"] = None,
+        **kwargs
+    ):
+        """
+        :keyword run_id:
+        :paramtype run_id: str
+        :keyword metric_id:
+        :paramtype metric_id: str
+        :keyword data_container_id:
+        :paramtype data_container_id: str
+        :keyword metric_type:
+        :paramtype metric_type: str
+        :keyword created_utc:
+        :paramtype created_utc: ~datetime.datetime
+        :keyword name:
+        :paramtype name: str
+        :keyword description:
+        :paramtype description: str
+        :keyword label:
+        :paramtype label: str
+        :keyword num_cells:
+        :paramtype num_cells: int
+        :keyword data_location:
+        :paramtype data_location: str
+        :keyword cells:
+        :paramtype cells: list[dict[str, any]]
+        :keyword schema:
+        :paramtype schema: ~azure.mgmt.machinelearningservices.models.MetricSchema
+        """
+        super(RunMetric, self).__init__(**kwargs)
+        self.run_id = run_id
+        self.metric_id = metric_id
+        self.data_container_id = data_container_id
+        self.metric_type = metric_type
+        self.created_utc = created_utc
+        self.name = name
+        self.description = description
+        self.label = label
+        self.num_cells = num_cells
+        self.data_location = data_location
+        self.cells = cells
+        self.schema = schema
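+
+
+# Illustrative sketch only: a RunMetric with a single logged cell. The metric
+# type string and ids are hypothetical, not prescribed by this client.
+def _example_run_metric():
+    return RunMetric(
+        run_id="example-run-id",
+        name="accuracy",
+        metric_type="scalar",                     # hypothetical metric type
+        created_utc=datetime.datetime(2024, 1, 1),
+        num_cells=1,
+        cells=[{"accuracy": 0.93}],               # one dict per logged value
+    )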
+
+
+class RunOptions(msrest.serialization.Model):
+    """RunOptions.
+
+    :ivar generate_data_container_id_if_not_specified:
+    :vartype generate_data_container_id_if_not_specified: bool
+    """
+
+    _attribute_map = {
+        'generate_data_container_id_if_not_specified': {'key': 'generateDataContainerIdIfNotSpecified', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        *,
+        generate_data_container_id_if_not_specified: Optional[bool] = None,
+        **kwargs
+    ):
+        """
+        :keyword generate_data_container_id_if_not_specified:
+        :paramtype generate_data_container_id_if_not_specified: bool
+        """
+        super(RunOptions, self).__init__(**kwargs)
+        self.generate_data_container_id_if_not_specified = generate_data_container_id_if_not_specified
+
+
+class RunServiceInstances(msrest.serialization.Model):
+    """RunServiceInstances.
+
+    :ivar instances: Dictionary of :code:`<ServiceInstanceResult>`.
+    :vartype instances: dict[str, ~azure.mgmt.machinelearningservices.models.ServiceInstanceResult]
+    """
+
+    _attribute_map = {
+        'instances': {'key': 'instances', 'type': '{ServiceInstanceResult}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        instances: Optional[Dict[str, "ServiceInstanceResult"]] = None,
+        **kwargs
+    ):
+        """
+        :keyword instances: Dictionary of :code:`<ServiceInstanceResult>`.
+        :paramtype instances: dict[str,
+         ~azure.mgmt.machinelearningservices.models.ServiceInstanceResult]
+        """
+        super(RunServiceInstances, self).__init__(**kwargs)
+        self.instances = instances
+
+
+class RunStatusSpans(msrest.serialization.Model):
+    """RunStatusSpans.
+
+    :ivar spans:
+    :vartype spans: list[~azure.mgmt.machinelearningservices.models.SpanDefinition1]
+    """
+
+    _attribute_map = {
+        'spans': {'key': 'spans', 'type': '[SpanDefinition1]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        spans: Optional[List["SpanDefinition1"]] = None,
+        **kwargs
+    ):
+        """
+        :keyword spans:
+        :paramtype spans: list[~azure.mgmt.machinelearningservices.models.SpanDefinition1]
+        """
+        super(RunStatusSpans, self).__init__(**kwargs)
+        self.spans = spans
+
+
+class RunTypeV2(msrest.serialization.Model):
+    """RunTypeV2.
+
+    :ivar orchestrator:
+    :vartype orchestrator: str
+    :ivar traits:
+    :vartype traits: list[str]
+    :ivar attribution:
+    :vartype attribution: str
+    :ivar compute_type:
+    :vartype compute_type: str
+    """
+
+    _validation = {
+        'traits': {'unique': True},
+    }
+
+    _attribute_map = {
+        'orchestrator': {'key': 'orchestrator', 'type': 'str'},
+        'traits': {'key': 'traits', 'type': '[str]'},
+        'attribution': {'key': 'attribution', 'type': 'str'},
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        orchestrator: Optional[str] = None,
+        traits: Optional[List[str]] = None,
+        attribution: Optional[str] = None,
+        compute_type: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword orchestrator:
+        :paramtype orchestrator: str
+        :keyword traits:
+        :paramtype traits: list[str]
+        :keyword attribution:
+        :paramtype attribution: str
+        :keyword compute_type:
+        :paramtype compute_type: str
+        """
+        super(RunTypeV2, self).__init__(**kwargs)
+        self.orchestrator = orchestrator
+        self.traits = traits
+        self.attribution = attribution
+        self.compute_type = compute_type
+
+
+class ServiceInstance(msrest.serialization.Model):
+    """ServiceInstance.
+
+    :ivar is_single_node:
+    :vartype is_single_node: bool
+    :ivar error_message:
+    :vartype error_message: str
+    :ivar port:
+    :vartype port: int
+    :ivar status:
+    :vartype status: str
+    :ivar error: The error response.
+    :vartype error: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+    :ivar properties: Dictionary of :code:`<string>`.
+    :vartype properties: dict[str, str]
+    """
+
+    _attribute_map = {
+        'is_single_node': {'key': 'isSingleNode', 'type': 'bool'},
+        'error_message': {'key': 'errorMessage', 'type': 'str'},
+        'port': {'key': 'port', 'type': 'int'},
+        'status': {'key': 'status', 'type': 'str'},
+        'error': {'key': 'error', 'type': 'ErrorResponse'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        is_single_node: Optional[bool] = None,
+        error_message: Optional[str] = None,
+        port: Optional[int] = None,
+        status: Optional[str] = None,
+        error: Optional["ErrorResponse"] = None,
+        properties: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        """
+        :keyword is_single_node:
+        :paramtype is_single_node: bool
+        :keyword error_message:
+        :paramtype error_message: str
+        :keyword port:
+        :paramtype port: int
+        :keyword status:
+        :paramtype status: str
+        :keyword error: The error response.
+        :paramtype error: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+        :keyword properties: Dictionary of :code:`<string>`.
+        :paramtype properties: dict[str, str]
+        """
+        super(ServiceInstance, self).__init__(**kwargs)
+        self.is_single_node = is_single_node
+        self.error_message = error_message
+        self.port = port
+        self.status = status
+        self.error = error
+        self.properties = properties
+
+
+class ServiceInstanceResult(msrest.serialization.Model):
+    """ServiceInstanceResult.
+
+    :ivar type:
+    :vartype type: str
+    :ivar port:
+    :vartype port: int
+    :ivar status:
+    :vartype status: str
+    :ivar error: The error response.
+    :vartype error: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+    :ivar endpoint:
+    :vartype endpoint: str
+    :ivar properties: Dictionary of :code:`<string>`.
+    :vartype properties: dict[str, str]
+    """
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'port': {'key': 'port', 'type': 'int'},
+        'status': {'key': 'status', 'type': 'str'},
+        'error': {'key': 'error', 'type': 'ErrorResponse'},
+        'endpoint': {'key': 'endpoint', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': '{str}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        type: Optional[str] = None,
+        port: Optional[int] = None,
+        status: Optional[str] = None,
+        error: Optional["ErrorResponse"] = None,
+        endpoint: Optional[str] = None,
+        properties: Optional[Dict[str, str]] = None,
+        **kwargs
+    ):
+        """
+        :keyword type:
+        :paramtype type: str
+        :keyword port:
+        :paramtype port: int
+        :keyword status:
+        :paramtype status: str
+        :keyword error: The error response.
+        :paramtype error: ~azure.mgmt.machinelearningservices.models.ErrorResponse
+        :keyword endpoint:
+        :paramtype endpoint: str
+        :keyword properties: Dictionary of :code:`<string>`.
+        :paramtype properties: dict[str, str]
+        """
+        super(ServiceInstanceResult, self).__init__(**kwargs)
+        self.type = type
+        self.port = port
+        self.status = status
+        self.error = error
+        self.endpoint = endpoint
+        self.properties = properties
+
+
+class SpanContext(msrest.serialization.Model):
+    """SpanContext.
+
+    :ivar trace_id: Gets the TraceId associated with this
+     Microsoft.MachineLearning.RunHistory.Contracts.SpanContext.
+     TODO: In actual spec, it is ActivityTraceId type. But that causes problems in
+     serialization/deserialization.
+    :vartype trace_id: str
+    :ivar span_id: Gets the SpanId associated with this
+     Microsoft.MachineLearning.RunHistory.Contracts.SpanContext.
+     TODO: In actual spec, it is ActivitySpanId type. But that causes problems in
+     serialization/deserialization.
+    :vartype span_id: str
+    :ivar is_remote: Gets a value indicating whether this
+     Microsoft.MachineLearning.RunHistory.Contracts.SpanContext
+     was propagated from a remote parent.
+    :vartype is_remote: bool
+    :ivar is_valid: Gets a value indicating whether this
+     Microsoft.MachineLearning.RunHistory.Contracts.SpanContext is valid.
+    :vartype is_valid: bool
+    :ivar tracestate: Gets the
+     Microsoft.MachineLearning.RunHistory.Contracts.SpanContext.Tracestate associated with this
+     Microsoft.MachineLearning.RunHistory.Contracts.SpanContext.
+    :vartype tracestate: list[~azure.mgmt.machinelearningservices.models.KeyValuePairString]
+    """
+
+    _attribute_map = {
+        'trace_id': {'key': 'traceId', 'type': 'str'},
+        'span_id': {'key': 'spanId', 'type': 'str'},
+        'is_remote': {'key': 'isRemote', 'type': 'bool'},
+        'is_valid': {'key': 'isValid', 'type': 'bool'},
+        'tracestate': {'key': 'tracestate', 'type': '[KeyValuePairString]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        trace_id: Optional[str] = None,
+        span_id: Optional[str] = None,
+        is_remote: Optional[bool] = None,
+        is_valid: Optional[bool] = None,
+        tracestate: Optional[List["KeyValuePairString"]] = None,
+        **kwargs
+    ):
+        """
+        :keyword trace_id: Gets the TraceId associated with this
+         Microsoft.MachineLearning.RunHistory.Contracts.SpanContext.
+         TODO: In actual spec, it is ActivityTraceId type. But that causes problems in
+         serialization/deserialization.
+        :paramtype trace_id: str
+        :keyword span_id: Gets the SpanId associated with this
+         Microsoft.MachineLearning.RunHistory.Contracts.SpanContext.
+         TODO: In actual spec, it is ActivitySpanId type. But that causes problems in
+         serialization/deserialization.
+        :paramtype span_id: str
+        :keyword is_remote: Gets a value indicating whether this
+         Microsoft.MachineLearning.RunHistory.Contracts.SpanContext
+         was propagated from a remote parent.
+        :paramtype is_remote: bool
+        :keyword is_valid: Gets a value indicating whether this
+         Microsoft.MachineLearning.RunHistory.Contracts.SpanContext is valid.
+        :paramtype is_valid: bool
+        :keyword tracestate: Gets the
+         Microsoft.MachineLearning.RunHistory.Contracts.SpanContext.Tracestate associated with this
+         Microsoft.MachineLearning.RunHistory.Contracts.SpanContext.
+        :paramtype tracestate: list[~azure.mgmt.machinelearningservices.models.KeyValuePairString]
+        """
+        super(SpanContext, self).__init__(**kwargs)
+        self.trace_id = trace_id
+        self.span_id = span_id
+        self.is_remote = is_remote
+        self.is_valid = is_valid
+        self.tracestate = tracestate
+
+
+class SpanDefinition1(msrest.serialization.Model):
+    """Most of the code in this class is vendored from here.
+https://github.com/open-telemetry/opentelemetry-dotnet/blob/master/src/OpenTelemetry/Trace/Export/SpanData.cs
+SpanData on that github link is readonly, we can't set properties on it after creation. So, just vendoring the Span
+contract.
+TStatus is the status enum. For runs, it is RunStatus
+This is the link for span spec https://github.com/open-telemetry/opentelemetry-specification/blob/master/specification/overview.md#span.
+
+    :ivar context:
+    :vartype context: ~azure.mgmt.machinelearningservices.models.SpanContext
+    :ivar name: Gets span name.
+    :vartype name: str
+    :ivar status: Gets span status.
+     OpenTelemetry sets it to
+     https://github.com/open-telemetry/opentelemetry-dotnet/blob/master/src/OpenTelemetry.Api/Trace/Status.cs
+     Those status enums are not very meaningful to us, so we customize them here. Possible values
+     include: "NotStarted", "Unapproved", "Pausing", "Paused", "Starting", "Preparing", "Queued",
+     "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled".
+    :vartype status: str or ~azure.mgmt.machinelearningservices.models.RunStatus
+    :ivar parent_span_id: Gets parent span id.
+     TODO: In actual spec, it is ActivitySpanId type. But that causes problems in
+     serialization/deserialization.
+    :vartype parent_span_id: str
+    :ivar attributes: Gets attributes.
+    :vartype attributes: list[~azure.mgmt.machinelearningservices.models.KeyValuePairStringJToken]
+    :ivar events: Gets events.
+    :vartype events: list[~azure.mgmt.machinelearningservices.models.Event]
+    :ivar links: Gets links.
+    :vartype links: list[~azure.mgmt.machinelearningservices.models.Link]
+    :ivar start_timestamp: Gets span start timestamp.
+    :vartype start_timestamp: ~datetime.datetime
+    :ivar end_timestamp: Gets span end timestamp.
+    :vartype end_timestamp: ~datetime.datetime
+    """
+
+    _attribute_map = {
+        'context': {'key': 'context', 'type': 'SpanContext'},
+        'name': {'key': 'name', 'type': 'str'},
+        'status': {'key': 'status', 'type': 'str'},
+        'parent_span_id': {'key': 'parentSpanId', 'type': 'str'},
+        'attributes': {'key': 'attributes', 'type': '[KeyValuePairStringJToken]'},
+        'events': {'key': 'events', 'type': '[Event]'},
+        'links': {'key': 'links', 'type': '[Link]'},
+        'start_timestamp': {'key': 'startTimestamp', 'type': 'iso-8601'},
+        'end_timestamp': {'key': 'endTimestamp', 'type': 'iso-8601'},
+    }
+
+    def __init__(
+        self,
+        *,
+        context: Optional["SpanContext"] = None,
+        name: Optional[str] = None,
+        status: Optional[Union[str, "RunStatus"]] = None,
+        parent_span_id: Optional[str] = None,
+        attributes: Optional[List["KeyValuePairStringJToken"]] = None,
+        events: Optional[List["Event"]] = None,
+        links: Optional[List["Link"]] = None,
+        start_timestamp: Optional[datetime.datetime] = None,
+        end_timestamp: Optional[datetime.datetime] = None,
+        **kwargs
+    ):
+        """
+        :keyword context:
+        :paramtype context: ~azure.mgmt.machinelearningservices.models.SpanContext
+        :keyword name: Gets span name.
+        :paramtype name: str
+        :keyword status: Gets span status.
+         OpenTelemetry sets it to
+         https://github.com/open-telemetry/opentelemetry-dotnet/blob/master/src/OpenTelemetry.Api/Trace/Status.cs
+         Those status enums are not very meaningful to us, so we customize them here. Possible values
+         include: "NotStarted", "Unapproved", "Pausing", "Paused", "Starting", "Preparing", "Queued",
+         "Running", "Finalizing", "CancelRequested", "Completed", "Failed", "Canceled".
+        :paramtype status: str or ~azure.mgmt.machinelearningservices.models.RunStatus
+        :keyword parent_span_id: Gets parent span id.
+         TODO: In actual spec, it is ActivitySpanId type. But that causes problems in
+         serialization/deserialization.
+        :paramtype parent_span_id: str
+        :keyword attributes: Gets attributes.
+        :paramtype attributes:
+         list[~azure.mgmt.machinelearningservices.models.KeyValuePairStringJToken]
+        :keyword events: Gets events.
+        :paramtype events: list[~azure.mgmt.machinelearningservices.models.Event]
+        :keyword links: Gets links.
+        :paramtype links: list[~azure.mgmt.machinelearningservices.models.Link]
+        :keyword start_timestamp: Gets span start timestamp.
+        :paramtype start_timestamp: ~datetime.datetime
+        :keyword end_timestamp: Gets span end timestamp.
+        :paramtype end_timestamp: ~datetime.datetime
+        """
+        super(SpanDefinition1, self).__init__(**kwargs)
+        self.context = context
+        self.name = name
+        self.status = status
+        self.parent_span_id = parent_span_id
+        self.attributes = attributes
+        self.events = events
+        self.links = links
+        self.start_timestamp = start_timestamp
+        self.end_timestamp = end_timestamp
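+
+
+# Illustrative sketch only: a SpanDefinition1 wired to a SpanContext. The ids
+# and timestamps are hypothetical; "Completed" is one of the RunStatus values
+# listed above.
+def _example_span():
+    context = SpanContext(
+        trace_id="4bf92f3577b34da6a3ce929d0e0e4736",  # hypothetical trace id
+        span_id="00f067aa0ba902b7",                   # hypothetical span id
+        is_remote=False,
+    )
+    return SpanDefinition1(
+        context=context,
+        name="example-step",
+        status="Completed",
+        start_timestamp=datetime.datetime(2024, 1, 1, 0, 0),
+        end_timestamp=datetime.datetime(2024, 1, 1, 0, 5),
+    )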
+
+
+class SqlDataPath(msrest.serialization.Model):
+    """SqlDataPath.
+
+    :ivar sql_table_name:
+    :vartype sql_table_name: str
+    :ivar sql_query:
+    :vartype sql_query: str
+    :ivar sql_stored_procedure_name:
+    :vartype sql_stored_procedure_name: str
+    :ivar sql_stored_procedure_params:
+    :vartype sql_stored_procedure_params:
+     list[~azure.mgmt.machinelearningservices.models.StoredProcedureParameter]
+    """
+
+    _attribute_map = {
+        'sql_table_name': {'key': 'sqlTableName', 'type': 'str'},
+        'sql_query': {'key': 'sqlQuery', 'type': 'str'},
+        'sql_stored_procedure_name': {'key': 'sqlStoredProcedureName', 'type': 'str'},
+        'sql_stored_procedure_params': {'key': 'sqlStoredProcedureParams', 'type': '[StoredProcedureParameter]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        sql_table_name: Optional[str] = None,
+        sql_query: Optional[str] = None,
+        sql_stored_procedure_name: Optional[str] = None,
+        sql_stored_procedure_params: Optional[List["StoredProcedureParameter"]] = None,
+        **kwargs
+    ):
+        """
+        :keyword sql_table_name:
+        :paramtype sql_table_name: str
+        :keyword sql_query:
+        :paramtype sql_query: str
+        :keyword sql_stored_procedure_name:
+        :paramtype sql_stored_procedure_name: str
+        :keyword sql_stored_procedure_params:
+        :paramtype sql_stored_procedure_params:
+         list[~azure.mgmt.machinelearningservices.models.StoredProcedureParameter]
+        """
+        super(SqlDataPath, self).__init__(**kwargs)
+        self.sql_table_name = sql_table_name
+        self.sql_query = sql_query
+        self.sql_stored_procedure_name = sql_stored_procedure_name
+        self.sql_stored_procedure_params = sql_stored_procedure_params
+
+
+class StoredProcedureParameter(msrest.serialization.Model):
+    """StoredProcedureParameter.
+
+    :ivar name:
+    :vartype name: str
+    :ivar value:
+    :vartype value: str
+    :ivar type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date".
+    :vartype type: str or ~azure.mgmt.machinelearningservices.models.StoredProcedureParameterType
+    """
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'value': {'key': 'value', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        name: Optional[str] = None,
+        value: Optional[str] = None,
+        type: Optional[Union[str, "StoredProcedureParameterType"]] = None,
+        **kwargs
+    ):
+        """
+        :keyword name:
+        :paramtype name: str
+        :keyword value:
+        :paramtype value: str
+        :keyword type: Possible values include: "String", "Int", "Decimal", "Guid", "Boolean", "Date".
+        :paramtype type: str or ~azure.mgmt.machinelearningservices.models.StoredProcedureParameterType
+        """
+        super(StoredProcedureParameter, self).__init__(**kwargs)
+        self.name = name
+        self.value = value
+        self.type = type
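+
+
+# Illustrative sketch only: a SqlDataPath that invokes a stored procedure. The
+# procedure name and parameters are hypothetical.
+def _example_sql_data_path():
+    return SqlDataPath(
+        sql_stored_procedure_name="usp_get_training_rows",
+        sql_stored_procedure_params=[
+            StoredProcedureParameter(name="MaxRows", value="1000", type="Int"),
+            StoredProcedureParameter(name="AsOfDate", value="2024-01-01", type="Date"),
+        ],
+    )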
+
+
+class TypedAssetReference(msrest.serialization.Model):
+    """TypedAssetReference.
+
+    :ivar asset_id:
+    :vartype asset_id: str
+    :ivar type:
+    :vartype type: str
+    """
+
+    _attribute_map = {
+        'asset_id': {'key': 'assetId', 'type': 'str'},
+        'type': {'key': 'type', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        asset_id: Optional[str] = None,
+        type: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword asset_id:
+        :paramtype asset_id: str
+        :keyword type:
+        :paramtype type: str
+        """
+        super(TypedAssetReference, self).__init__(**kwargs)
+        self.asset_id = asset_id
+        self.type = type
+
+
+class User(msrest.serialization.Model):
+    """User.
+
+    :ivar user_object_id: A user or service principal's object ID.
+     This is EUPI and may only be logged to warm path telemetry.
+    :vartype user_object_id: str
+    :ivar user_pu_id: A user or service principal's PuID.
+     This is PII and should never be logged.
+    :vartype user_pu_id: str
+    :ivar user_idp: A user's identity provider, e.g. live.com.
+     This is PII and should never be logged.
+    :vartype user_idp: str
+    :ivar user_alt_sec_id: A user's alternate security id, which represents the user in a different
+     identity provider system, e.g. 1:live.com:puid.
+     This is PII and should never be logged.
+    :vartype user_alt_sec_id: str
+    :ivar user_iss: The issuer which issued the token for this user.
+     This is PII and should never be logged.
+    :vartype user_iss: str
+    :ivar user_tenant_id: A user or service principal's tenant ID.
+    :vartype user_tenant_id: str
+    :ivar user_name: A user's full name or a service principal's app ID.
+     This is PII and should never be logged.
+    :vartype user_name: str
+    :ivar upn: A user's principal name (UPN).
+     This is PII and should never be logged.
+    :vartype upn: str
+    """
+
+    _attribute_map = {
+        'user_object_id': {'key': 'userObjectId', 'type': 'str'},
+        'user_pu_id': {'key': 'userPuId', 'type': 'str'},
+        'user_idp': {'key': 'userIdp', 'type': 'str'},
+        'user_alt_sec_id': {'key': 'userAltSecId', 'type': 'str'},
+        'user_iss': {'key': 'userIss', 'type': 'str'},
+        'user_tenant_id': {'key': 'userTenantId', 'type': 'str'},
+        'user_name': {'key': 'userName', 'type': 'str'},
+        'upn': {'key': 'upn', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        user_object_id: Optional[str] = None,
+        user_pu_id: Optional[str] = None,
+        user_idp: Optional[str] = None,
+        user_alt_sec_id: Optional[str] = None,
+        user_iss: Optional[str] = None,
+        user_tenant_id: Optional[str] = None,
+        user_name: Optional[str] = None,
+        upn: Optional[str] = None,
+        **kwargs
+    ):
+        """
+        :keyword user_object_id: A user or service principal's object ID.
+         This is EUPI and may only be logged to warm path telemetry.
+        :paramtype user_object_id: str
+        :keyword user_pu_id: A user or service principal's PuID.
+         This is PII and should never be logged.
+        :paramtype user_pu_id: str
+        :keyword user_idp: A user's identity provider, e.g. live.com.
+         This is PII and should never be logged.
+        :paramtype user_idp: str
+        :keyword user_alt_sec_id: A user's alternate security id, which represents the user in a different
+         identity provider system, e.g. 1:live.com:puid.
+         This is PII and should never be logged.
+        :paramtype user_alt_sec_id: str
+        :keyword user_iss: The issuer which issued the token for this user.
+         This is PII and should never be logged.
+        :paramtype user_iss: str
+        :keyword user_tenant_id: A user or service principal's tenant ID.
+        :paramtype user_tenant_id: str
+        :keyword user_name: A user's full name or a service principal's app ID.
+         This is PII and should never be logged.
+        :paramtype user_name: str
+        :keyword upn: A user's principal name (UPN).
+         This is PII and should never be logged.
+        :paramtype upn: str
+        """
+        super(User, self).__init__(**kwargs)
+        self.user_object_id = user_object_id
+        self.user_pu_id = user_pu_id
+        self.user_idp = user_idp
+        self.user_alt_sec_id = user_alt_sec_id
+        self.user_iss = user_iss
+        self.user_tenant_id = user_tenant_id
+        self.user_name = user_name
+        self.upn = upn
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/__init__.py
new file mode 100644
index 00000000..3e84a44a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/__init__.py
@@ -0,0 +1,27 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._delete_operations import DeleteOperations
+from ._events_operations import EventsOperations
+from ._experiments_operations import ExperimentsOperations
+from ._metric_operations import MetricOperations
+from ._runs_operations import RunsOperations
+from ._run_artifacts_operations import RunArtifactsOperations
+from ._run_operations import RunOperations
+from ._spans_operations import SpansOperations
+
+__all__ = [
+    'DeleteOperations',
+    'EventsOperations',
+    'ExperimentsOperations',
+    'MetricOperations',
+    'RunsOperations',
+    'RunArtifactsOperations',
+    'RunOperations',
+    'SpansOperations',
+]
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_delete_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_delete_operations.py
new file mode 100644
index 00000000..9080611e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_delete_operations.py
@@ -0,0 +1,248 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+
+from msrest import Serializer
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models as _models
+from .._vendor import _convert_request, _format_url_section
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any, Callable, Dict, Optional, TypeVar
+    T = TypeVar('T')
+    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+# fmt: off
+
+def build_patch_configuration_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/private/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/deleteConfiguration")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="PATCH",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_configuration_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/private/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/deleteConfiguration")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+# fmt: on
+class DeleteOperations(object):
+    """DeleteOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace
+    def patch_configuration(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body=None,  # type: Optional["_models.DeleteConfiguration"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.DeleteConfiguration"
+        """patch_configuration.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DeleteConfiguration
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DeleteConfiguration, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DeleteConfiguration
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeleteConfiguration"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DeleteConfiguration')
+        else:
+            _json = None
+
+        request = build_patch_configuration_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.patch_configuration.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DeleteConfiguration', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    patch_configuration.metadata = {'url': "/history/v1.0/private/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/deleteConfiguration"}  # type: ignore
+
+
+    @distributed_trace
+    def get_configuration(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.DeleteConfiguration"
+        """get_configuration.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DeleteConfiguration, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DeleteConfiguration
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeleteConfiguration"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+
+        request = build_get_configuration_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            template_url=self.get_configuration.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DeleteConfiguration', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_configuration.metadata = {'url': "/history/v1.0/private/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/deleteConfiguration"}  # type: ignore
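+
+
+# Illustrative sketch only: reading the delete configuration through an already
+# constructed service client. The `delete` attribute name on the client and the
+# workspace coordinates below are assumptions, not guaranteed by this file.
+def _example_get_delete_configuration(client):
+    # `client` is expected to be the generated service client that attaches
+    # DeleteOperations for you, as described in the class docstring above.
+    return client.delete.get_configuration(
+        subscription_id="00000000-0000-0000-0000-000000000000",
+        resource_group_name="example-rg",
+        workspace_name="example-workspace",
+    )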
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_events_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_events_operations.py
new file mode 100644
index 00000000..f87546d9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_events_operations.py
@@ -0,0 +1,713 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+
+from msrest import Serializer
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models as _models
+from .._vendor import _convert_request, _format_url_section
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any, Callable, Dict, Optional, TypeVar
+    T = TypeVar('T')
+    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+# fmt: off
+
+def build_batch_post_by_experiment_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/batch/events")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_batch_post_by_experiment_id_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/batch/events")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_batch_post_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batch/events")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_post_by_experiment_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/events")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_post_by_experiment_id_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/events")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_post_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/events")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
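+
+# Usage sketch (illustrative comment only, not part of the generated request
+# builders): each build_*_request helper above just assembles an
+# azure.core.rest.HttpRequest without sending it. With placeholder IDs:
+#
+#     request = build_post_request(
+#         subscription_id="00000000-0000-0000-0000-000000000000",
+#         resource_group_name="my-rg",
+#         workspace_name="my-ws",
+#         run_id="my-run-id",
+#     )
+#     assert request.method == "POST"
+#     # request.url is the formatted .../workspaces/my-ws/runs/my-run-id/events
+#     # path; headers carry Accept (and Content-Type when one is supplied).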
+
+# fmt: on
+class EventsOperations(object):
+    """EventsOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace
+    def batch_post_by_experiment_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        experiment_name,  # type: str
+        body=None,  # type: Optional["_models.BatchEventCommand"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.BatchEventCommandResult"
+        """batch_post_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchEventCommand
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchEventCommandResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchEventCommandResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchEventCommandResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchEventCommand')
+        else:
+            _json = None
+
+        request = build_batch_post_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_post_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchEventCommandResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_post_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/batch/events"}  # type: ignore
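+
+    # Usage sketch (illustrative comment, not generated code). It assumes the
+    # AzureMachineLearningWorkspaces client defined alongside this module accepts
+    # a credential plus a regional base_url and exposes this operation group as
+    # `events`; the endpoint and IDs are placeholders, and BatchEventCommand
+    # fields should be taken from the generated models module.
+    #
+    #     from azure.identity import DefaultAzureCredential
+    #
+    #     client = AzureMachineLearningWorkspaces(
+    #         credential=DefaultAzureCredential(),
+    #         base_url="https://<region>.api.azureml.ms",
+    #     )
+    #     result = client.events.batch_post_by_experiment_name(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         experiment_name="<experiment>",
+    #         body=_models.BatchEventCommand(events=[...]),
+    #     )
+    #     # result is a BatchEventCommandResult on HTTP 200.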
+
+
+    @distributed_trace
+    def batch_post_by_experiment_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        experiment_id,  # type: str
+        body=None,  # type: Optional["_models.BatchEventCommand"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.BatchEventCommandResult"
+        """batch_post_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchEventCommand
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchEventCommandResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchEventCommandResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchEventCommandResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchEventCommand')
+        else:
+            _json = None
+
+        request = build_batch_post_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_post_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchEventCommandResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_post_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/batch/events"}  # type: ignore
+
+
+    @distributed_trace
+    def batch_post(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body=None,  # type: Optional["_models.BatchEventCommand"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.BatchEventCommandResult"
+        """batch_post.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchEventCommand
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchEventCommandResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchEventCommandResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchEventCommandResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchEventCommand')
+        else:
+            _json = None
+
+        request = build_batch_post_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_post.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchEventCommandResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_post.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batch/events"}  # type: ignore
+
+
+    @distributed_trace
+    def post_by_experiment_name(  # pylint: disable=inconsistent-return-statements
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_name,  # type: str
+        body=None,  # type: Optional["_models.BaseEvent"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """post_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BaseEvent
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BaseEvent')
+        else:
+            _json = None
+
+        request = build_post_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.post_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    post_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/events"}  # type: ignore
+
+
+    @distributed_trace
+    def post_by_experiment_id(  # pylint: disable=inconsistent-return-statements
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_id,  # type: str
+        body=None,  # type: Optional["_models.BaseEvent"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """post_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BaseEvent
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BaseEvent')
+        else:
+            _json = None
+
+        request = build_post_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.post_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    post_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/events"}  # type: ignore
+
+
+    @distributed_trace
+    def post(  # pylint: disable=inconsistent-return-statements
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        body=None,  # type: Optional["_models.BaseEvent"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """post.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BaseEvent
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BaseEvent')
+        else:
+            _json = None
+
+        request = build_post_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.post.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    post.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/events"}  # type: ignore
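+
+    # Usage sketch (illustrative comment): posting a single event for a run with
+    # the same hypothetical `client` sketched for batch_post_by_experiment_name;
+    # BaseEvent fields should be checked in the generated models module.
+    #
+    #     client.events.post(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         body=_models.BaseEvent(...),
+    #     )
+    #     # Returns None on success; other statuses raise HttpResponseError.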
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_experiments_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_experiments_operations.py
new file mode 100644
index 00000000..3b12b9c1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_experiments_operations.py
@@ -0,0 +1,878 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+
+from msrest import Serializer
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models as _models
+from .._vendor import _convert_request, _format_url_section
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union
+    T = TypeVar('T')
+    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+# fmt: off
+
+def build_get_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_create_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_by_id_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_update_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="PATCH",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_delete_request_initial(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="DELETE",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_by_query_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+    url_safe_experiment_names_only = kwargs.pop('url_safe_experiment_names_only', True)  # type: Optional[bool]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments:query")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if url_safe_experiment_names_only is not None:
+        _query_parameters['urlSafeExperimentNamesOnly'] = _SERIALIZER.query("url_safe_experiment_names_only", url_safe_experiment_names_only, 'bool')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_delete_tags_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/tags:delete")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+# fmt: on
+class ExperimentsOperations(object):
+    """ExperimentsOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace
+    def get(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        experiment_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Experiment"
+        """Get details of an Experiment.
+
+        Get details of an Experiment with specific Experiment name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_name: The experiment name.
+        :type experiment_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Experiment, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Experiment
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Experiment"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_name=experiment_name,
+            template_url=self.get.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Experiment', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}"}  # type: ignore
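+
+    # Usage sketch (illustrative comment). It assumes the workspace client exposes
+    # this operation group as `experiments` (client construction as sketched in
+    # _events_operations.py; all IDs below are placeholders):
+    #
+    #     experiment = client.experiments.get(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         experiment_name="<experiment>",
+    #     )
+    #     # experiment is a deserialized _models.Experiment on HTTP 200.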
+
+
+    @distributed_trace
+    def create(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        experiment_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Experiment"
+        """Create an Experiment.
+
+        Create a new Experiment.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_name: The experiment name.
+        :type experiment_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Experiment, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Experiment
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Experiment"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_create_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_name=experiment_name,
+            template_url=self.create.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Experiment', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    create.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}"}  # type: ignore
+
+
+    @distributed_trace
+    def get_by_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        experiment_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Experiment"
+        """Get details of an Experiment.
+
+        Get details of an Experiment with specific Experiment Id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id: The identifier of the experiment.
+        :type experiment_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Experiment, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Experiment
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Experiment"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_by_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            template_url=self.get_by_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Experiment', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_by_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}"}  # type: ignore
+
+
+    @distributed_trace
+    def update(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        experiment_id,  # type: str
+        body=None,  # type: Optional["_models.ModifyExperiment"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Experiment"
+        """Update details of an Experiment.
+
+        Update details of an Experiment with specific Experiment Id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id: The identifier of the experiment.
+        :type experiment_id: str
+        :param body: Experiment details which needs to be updated.
+        :type body: ~azure.mgmt.machinelearningservices.models.ModifyExperiment
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Experiment, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Experiment
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Experiment"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'ModifyExperiment')
+        else:
+            _json = None
+
+        request = build_update_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.update.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Experiment', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    update.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}"}  # type: ignore
+
+
+    def _delete_initial(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        experiment_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Any
+        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_delete_request_initial(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            template_url=self._delete_initial.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('object', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    _delete_initial.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}"}  # type: ignore
+
+
+    @distributed_trace
+    def begin_delete(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        experiment_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> LROPoller[Any]
+        """Delete an Experiment.
+
+        Delete an existing Empty Experiment.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id: The identifier of the experiment.
+        :type experiment_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+         operation to not poll, or pass in your own initialized polling object for a personal polling
+         strategy.
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of LROPoller that returns either any or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[any]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            raw_result = self._delete_initial(
+                subscription_id=subscription_id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                experiment_id=experiment_id,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+        kwargs.pop('error_map', None)
+
+        def get_long_running_output(pipeline_response):
+            response = pipeline_response.http_response
+            deserialized = self._deserialize('object', pipeline_response)
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+
+    begin_delete.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}"}  # type: ignore
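+
+    # Usage sketch (illustrative comment): begin_delete returns an LROPoller, so a
+    # caller typically blocks on .result() (or polls with .done()/.wait()). Same
+    # hypothetical `client` and placeholder IDs as above:
+    #
+    #     poller = client.experiments.begin_delete(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         experiment_id="<experiment-id>",
+    #     )
+    #     outcome = poller.result()  # waits for the long-running delete to finish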
+
+    @distributed_trace
+    def get_by_query(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        url_safe_experiment_names_only=True,  # type: Optional[bool]
+        body=None,  # type: Optional["_models.ExperimentQueryParams"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedExperimentList"]
+        """Get all Experiments in a specific workspace.
+
+        Get all experiments in a specific workspace with the specified query filters.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param url_safe_experiment_names_only:
+        :type url_safe_experiment_names_only: bool
+        :param body: Query parameters for data sorting and filtering.
+        :type body: ~azure.mgmt.machinelearningservices.models.ExperimentQueryParams
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedExperimentList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedExperimentList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedExperimentList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                if body is not None:
+                    _json = self._serialize.body(body, 'ExperimentQueryParams')
+                else:
+                    _json = None
+                
+                request = build_get_by_query_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    content_type=content_type,
+                    json=_json,
+                    url_safe_experiment_names_only=url_safe_experiment_names_only,
+                    template_url=self.get_by_query.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                if body is not None:
+                    _json = self._serialize.body(body, 'ExperimentQueryParams')
+                else:
+                    _json = None
+                
+                request = build_get_by_query_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    content_type=content_type,
+                    json=_json,
+                    url_safe_experiment_names_only=url_safe_experiment_names_only,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedExperimentList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    get_by_query.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments:query"}  # type: ignore
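+
+    # Minimal usage sketch (illustration only, not part of the generated client):
+    # `operations` is assumed to be an instance of this operation group.
+    @staticmethod
+    def _example_get_by_query_usage(operations, subscription_id, resource_group_name, workspace_name):
+        # get_by_query returns an ItemPaged iterator that lazily follows the
+        # service's continuation links; body=None (the default) applies no extra
+        # query filters, and list() materializes every page.
+        return list(operations.get_by_query(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+        ))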
+
+    @distributed_trace
+    def delete_tags(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        experiment_id,  # type: str
+        body=None,  # type: Optional["_models.DeleteTagsCommand"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.DeleteExperimentTagsResult"
+        """Delete list of Tags in an Experiment.
+
+        Delete list of Tags from a specific Experiment Id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id: The identifier of the experiment.
+        :type experiment_id: str
+        :param body: The requested tags list to be deleted.
+        :type body: ~azure.mgmt.machinelearningservices.models.DeleteTagsCommand
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: DeleteExperimentTagsResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.DeleteExperimentTagsResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeleteExperimentTagsResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DeleteTagsCommand')
+        else:
+            _json = None
+
+        request = build_delete_tags_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.delete_tags.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('DeleteExperimentTagsResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    delete_tags.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/tags:delete"}  # type: ignore
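+
+    # Minimal usage sketch (illustration only, not part of the generated client):
+    # assumes `operations` is an instance of this operation group and that the
+    # DeleteTagsCommand model exposes the tag names through a `tags` field (an
+    # assumption made for illustration).
+    @staticmethod
+    def _example_delete_tags_usage(operations, subscription_id, resource_group_name, workspace_name, experiment_id, tag_names):
+        # Build the command body from plain tag-name strings; the service answers
+        # with a DeleteExperimentTagsResult on HTTP 200.
+        command = _models.DeleteTagsCommand(tags=list(tag_names))
+        return operations.delete_tags(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            body=command,
+        )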
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_metric_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_metric_operations.py
new file mode 100644
index 00000000..f9184554
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_metric_operations.py
@@ -0,0 +1,1206 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+
+from msrest import Serializer
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models as _models
+from .._vendor import _convert_request, _format_url_section
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union
+    T = TypeVar('T')
+    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+# fmt: off
+
+def build_get_full_fidelity_metric_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/full")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_list_metric_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/list")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_list_generic_resource_metrics_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/azuremonitor/list")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_sampled_metric_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/sample")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_post_run_metrics_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/batch")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_delete_metrics_by_data_container_id_request_initial(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    experiment_id,  # type: str
+    data_container_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentId}/containers/{dataContainerId}/deleteMetrics")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+        "dataContainerId": _SERIALIZER.url("data_container_id", data_container_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="DELETE",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_delete_metrics_by_run_id_request_initial(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/deleteMetrics")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="DELETE",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_metric_details_by_experiment_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    metric_id,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/metrics/{metricId}")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "metricId": _SERIALIZER.url("metric_id", metric_id, 'str'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_metric_details_by_experiment_id_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    metric_id,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/metrics/{metricId}")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "metricId": _SERIALIZER.url("metric_id", metric_id, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+# fmt: on
+class MetricOperations(object):
+    """MetricOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace
+    def get_full_fidelity_metric(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        body=None,  # type: Optional["_models.RetrieveFullFidelityMetricRequest"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.MetricV2"
+        """API to retrieve the full-fidelity sequence associated with a particular run and metricName.
+
+        API to retrieve the full-fidelity sequence associated with a particular run and metricName.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.RetrieveFullFidelityMetricRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: MetricV2, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.MetricV2
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.MetricV2"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'RetrieveFullFidelityMetricRequest')
+        else:
+            _json = None
+
+        request = build_get_full_fidelity_metric_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.get_full_fidelity_metric.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('MetricV2', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_full_fidelity_metric.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/full"}  # type: ignore
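+
+    # Minimal usage sketch (illustration only, not part of the generated client):
+    # assumes `operations` is an instance of this operation group and that
+    # `request_body` is a RetrieveFullFidelityMetricRequest (or None) prepared by
+    # the caller.
+    @staticmethod
+    def _example_get_full_fidelity_metric_usage(operations, subscription_id, resource_group_name, workspace_name, run_id, request_body=None):
+        # Returns the deserialized MetricV2 for the requested run, or raises
+        # HttpResponseError on any non-200 status.
+        return operations.get_full_fidelity_metric(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            body=request_body,
+        )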
+
+
+    @distributed_trace
+    def list_metric(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        body=None,  # type: Optional["_models.ListMetrics"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedMetricDefinitionList"]
+        """API to list metric for a particular datacontainer and metricName.
+
+        API to list metric for a particular datacontainer and metricName.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.ListMetrics
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedMetricDefinitionList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedMetricDefinitionList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedMetricDefinitionList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                if body is not None:
+                    _json = self._serialize.body(body, 'ListMetrics')
+                else:
+                    _json = None
+                
+                request = build_list_metric_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=self.list_metric.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                if body is not None:
+                    _json = self._serialize.body(body, 'ListMetrics')
+                else:
+                    _json = None
+                
+                request = build_list_metric_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedMetricDefinitionList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list_metric.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/list"}  # type: ignore
+
+    @distributed_trace
+    def list_generic_resource_metrics(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body=None,  # type: Optional["_models.ListGenericResourceMetrics"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedMetricDefinitionList"]
+        """API to list workspace/subworkspace resource metrics for a particular ResourceId.
+
+        API to list workspace/subworkspace resource metrics for a particular ResourceId.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.ListGenericResourceMetrics
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedMetricDefinitionList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedMetricDefinitionList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedMetricDefinitionList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                if body is not None:
+                    _json = self._serialize.body(body, 'ListGenericResourceMetrics')
+                else:
+                    _json = None
+                
+                request = build_list_generic_resource_metrics_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=self.list_generic_resource_metrics.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                if body is not None:
+                    _json = self._serialize.body(body, 'ListGenericResourceMetrics')
+                else:
+                    _json = None
+                
+                request = build_list_generic_resource_metrics_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedMetricDefinitionList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list_generic_resource_metrics.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/azuremonitor/list"}  # type: ignore
+
+    @distributed_trace
+    def get_sampled_metric(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        body=None,  # type: Optional["_models.GetSampledMetricRequest"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.MetricSample"
+        """Stub for future action
+        API to retrieve samples for one or many runs to compare a given metricName
+        Throw if schemas don't match across metrics.
+
+        Stub for future action
+        API to retrieve samples for one or many runs to compare a given metricName
+        Throw if schemas don't match across metrics.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.GetSampledMetricRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: MetricSample, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.MetricSample
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.MetricSample"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'GetSampledMetricRequest')
+        else:
+            _json = None
+
+        request = build_get_sampled_metric_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.get_sampled_metric.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('MetricSample', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_sampled_metric.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/sample"}  # type: ignore
+
+
+    @distributed_trace
+    def post_run_metrics(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        body=None,  # type: Optional["_models.BatchIMetricV2"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.PostRunMetricsResult"
+        """Post Metrics to a Run.
+
+        Post Metrics to a specific Run Id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchIMetricV2
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: PostRunMetricsResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.PostRunMetricsResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PostRunMetricsResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchIMetricV2')
+        else:
+            _json = None
+
+        request = build_post_run_metrics_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.post_run_metrics.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 207]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Only 200 and 207 (partial success) reach this point, and both carry a
+        # PostRunMetricsResult payload, so a single deserialization covers them.
+        deserialized = self._deserialize('PostRunMetricsResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    post_run_metrics.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/batch"}  # type: ignore
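+
+    # Minimal usage sketch (illustration only, not part of the generated client):
+    # assumes `operations` is an instance of this operation group and that `batch`
+    # is a _models.BatchIMetricV2 instance prepared by the caller.
+    @staticmethod
+    def _example_post_run_metrics_usage(operations, subscription_id, resource_group_name, workspace_name, run_id, batch):
+        # Both 200 and 207 (partial success) responses deserialize to a
+        # PostRunMetricsResult.
+        return operations.post_run_metrics(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            body=batch,
+        )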
+
+
+    def _delete_metrics_by_data_container_id_initial(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        experiment_id,  # type: str
+        data_container_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Any
+        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_delete_metrics_by_data_container_id_request_initial(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            data_container_id=data_container_id,
+            template_url=self._delete_metrics_by_data_container_id_initial.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('object', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    _delete_metrics_by_data_container_id_initial.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentId}/containers/{dataContainerId}/deleteMetrics"}  # type: ignore
+
+
+    @distributed_trace
+    def begin_delete_metrics_by_data_container_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        experiment_id,  # type: str
+        data_container_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> LROPoller[Any]
+        """API to delete metrics by data container id.
+
+        API to delete metrics by data container id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param data_container_id:
+        :type data_container_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+         operation to not poll, or pass in your own initialized polling object for a personal polling
+         strategy.
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of LROPoller that returns either any or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[any]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            raw_result = self._delete_metrics_by_data_container_id_initial(
+                subscription_id=subscription_id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                experiment_id=experiment_id,
+                data_container_id=data_container_id,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+        kwargs.pop('error_map', None)
+
+        def get_long_running_output(pipeline_response):
+            response = pipeline_response.http_response
+            deserialized = self._deserialize('object', pipeline_response)
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+
+    begin_delete_metrics_by_data_container_id.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentId}/containers/{dataContainerId}/deleteMetrics"}  # type: ignore
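+
+    # Minimal usage sketch (illustration only, not part of the generated client):
+    # `operations` is assumed to be an instance of this operation group.
+    @staticmethod
+    def _example_delete_metrics_without_polling(operations, subscription_id, resource_group_name, workspace_name, experiment_id, data_container_id):
+        # polling=False selects NoPolling above, so result() returns as soon as
+        # the initial delete request has been accepted rather than waiting for
+        # the long-running operation to complete.
+        poller = operations.begin_delete_metrics_by_data_container_id(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            data_container_id=data_container_id,
+            polling=False,
+        )
+        return poller.result()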
+
+    def _delete_metrics_by_run_id_initial(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Any
+        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_delete_metrics_by_run_id_request_initial(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            template_url=self._delete_metrics_by_run_id_initial.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('object', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    _delete_metrics_by_run_id_initial.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/deleteMetrics"}  # type: ignore
+
+
+    @distributed_trace
+    def begin_delete_metrics_by_run_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> LROPoller[Any]
+        """API to delete metrics by run id.
+
+        API to delete metrics by run id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+         operation to not poll, or pass in your own initialized polling object for a personal polling
+         strategy.
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of LROPoller that returns either any or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[any]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType[Any]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            raw_result = self._delete_metrics_by_run_id_initial(
+                subscription_id=subscription_id,
+                resource_group_name=resource_group_name,
+                workspace_name=workspace_name,
+                run_id=run_id,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+        kwargs.pop('error_map', None)
+
+        def get_long_running_output(pipeline_response):
+            response = pipeline_response.http_response
+            deserialized = self._deserialize('object', pipeline_response)
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+
+        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+
+    begin_delete_metrics_by_run_id.metadata = {'url': "/metric/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/deleteMetrics"}  # type: ignore
+
+    @distributed_trace
+    def get_metric_details_by_experiment_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        metric_id,  # type: str
+        experiment_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.RunMetric"
+        """Get Metric details.
+
+        Get Metric details for a specific Metric Id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param metric_id: The identifier for a Metric.
+        :type metric_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: RunMetric, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.RunMetric
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RunMetric"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_metric_details_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            metric_id=metric_id,
+            experiment_name=experiment_name,
+            template_url=self.get_metric_details_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('RunMetric', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_metric_details_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/metrics/{metricId}"}  # type: ignore
+
+
+    @distributed_trace
+    def get_metric_details_by_experiment_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        metric_id,  # type: str
+        experiment_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.RunMetric"
+        """Get Metric details.
+
+        Get Metric details for a specific Metric Id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param metric_id: The identifier for a Metric.
+        :type metric_id: str
+        :param experiment_id: The identifier of the experiment.
+        :type experiment_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: RunMetric, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.RunMetric
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RunMetric"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_metric_details_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            metric_id=metric_id,
+            experiment_id=experiment_id,
+            template_url=self.get_metric_details_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('RunMetric', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_metric_details_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/metrics/{metricId}"}  # type: ignore
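+
+    # Illustrative usage sketch (editor's note, not generated code). Assuming a
+    # service client has been built from this package's
+    # ``AzureMachineLearningWorkspaces`` class and that it exposes this operation
+    # group as an attribute (the attribute name ``metric`` is an assumption), a
+    # metric could be fetched roughly like this:
+    #
+    #   run_metric = client.metric.get_metric_details_by_experiment_name(
+    #       subscription_id="<subscription-id>",
+    #       resource_group_name="<resource-group>",
+    #       workspace_name="<workspace>",
+    #       metric_id="<metric-id>",
+    #       experiment_name="<experiment>",
+    #   )
+    #   # ``run_metric`` is a deserialized RunMetric model (see _models).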
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_run_artifacts_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_run_artifacts_operations.py
new file mode 100644
index 00000000..c7dd5930
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_run_artifacts_operations.py
@@ -0,0 +1,1850 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+
+from msrest import Serializer
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models as _models
+from .._vendor import _convert_request, _format_url_section
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any, Callable, Dict, Iterable, Optional, TypeVar
+    T = TypeVar('T')
+    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+# fmt: off
+
+def build_list_in_container_by_experiment_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    continuation_token_parameter = kwargs.pop('continuation_token_parameter', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if continuation_token_parameter is not None:
+        _query_parameters['continuationToken'] = _SERIALIZER.query("continuation_token_parameter", continuation_token_parameter, 'str')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
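+
+
+# Illustrative usage sketch (editor's note, not generated code). Each
+# ``build_*_request`` helper in this module only assembles an
+# ``azure.core.rest.HttpRequest``; nothing is sent until the request is run
+# through the client pipeline. The placeholder ids below are assumptions
+# supplied by the caller.
+#
+#   request = build_list_in_container_by_experiment_name_request(
+#       subscription_id="<subscription-id>",
+#       resource_group_name="<resource-group>",
+#       workspace_name="<workspace>",
+#       run_id="<run-id>",
+#       experiment_name="<experiment>",
+#       continuation_token_parameter=None,
+#   )
+#   assert request.method == "GET"
+#   # request.url is still the host-less template path; the operations class
+#   # formats it against the client's base url before sending.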
+
+
+def build_list_in_container_by_experiment_id_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    continuation_token_parameter = kwargs.pop('continuation_token_parameter', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if continuation_token_parameter is not None:
+        _query_parameters['continuationToken'] = _SERIALIZER.query("continuation_token_parameter", continuation_token_parameter, 'str')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_list_in_path_by_experiment_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    path = kwargs.pop('path', None)  # type: Optional[str]
+    continuation_token_parameter = kwargs.pop('continuation_token_parameter', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/path")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if path is not None:
+        _query_parameters['path'] = _SERIALIZER.query("path", path, 'str')
+    if continuation_token_parameter is not None:
+        _query_parameters['continuationToken'] = _SERIALIZER.query("continuation_token_parameter", continuation_token_parameter, 'str')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_list_in_path_by_experiment_id_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    path = kwargs.pop('path', None)  # type: Optional[str]
+    continuation_token_parameter = kwargs.pop('continuation_token_parameter', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/path")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if path is not None:
+        _query_parameters['path'] = _SERIALIZER.query("path", path, 'str')
+    if continuation_token_parameter is not None:
+        _query_parameters['continuationToken'] = _SERIALIZER.query("continuation_token_parameter", continuation_token_parameter, 'str')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_by_id_by_experiment_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    path = kwargs.pop('path', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/metadata")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if path is not None:
+        _query_parameters['path'] = _SERIALIZER.query("path", path, 'str')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_by_id_by_experiment_id_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    path = kwargs.pop('path', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/metadata")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if path is not None:
+        _query_parameters['path'] = _SERIALIZER.query("path", path, 'str')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_content_information_by_experiment_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    path = kwargs.pop('path', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/contentinfo")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if path is not None:
+        _query_parameters['path'] = _SERIALIZER.query("path", path, 'str')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_content_information_by_experiment_id_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    path = kwargs.pop('path', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/contentinfo")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if path is not None:
+        _query_parameters['path'] = _SERIALIZER.query("path", path, 'str')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_sas_uri_by_experiment_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    path = kwargs.pop('path', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/artifacturi")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if path is not None:
+        _query_parameters['path'] = _SERIALIZER.query("path", path, 'str')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_sas_uri_by_experiment_id_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    path = kwargs.pop('path', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/artifacturi")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if path is not None:
+        _query_parameters['path'] = _SERIALIZER.query("path", path, 'str')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_list_sas_by_prefix_by_experiment_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    path = kwargs.pop('path', None)  # type: Optional[str]
+    continuation_token_parameter = kwargs.pop('continuation_token_parameter', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/prefix/contentinfo")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if path is not None:
+        _query_parameters['path'] = _SERIALIZER.query("path", path, 'str')
+    if continuation_token_parameter is not None:
+        _query_parameters['continuationToken'] = _SERIALIZER.query("continuation_token_parameter", continuation_token_parameter, 'str')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_list_sas_by_prefix_by_experiment_id_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    path = kwargs.pop('path', None)  # type: Optional[str]
+    continuation_token_parameter = kwargs.pop('continuation_token_parameter', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/prefix/contentinfo")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if path is not None:
+        _query_parameters['path'] = _SERIALIZER.query("path", path, 'str')
+    if continuation_token_parameter is not None:
+        _query_parameters['continuationToken'] = _SERIALIZER.query("continuation_token_parameter", continuation_token_parameter, 'str')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_batch_create_empty_artifacts_by_experiment_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/batch/metadata")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_batch_create_empty_artifacts_by_experiment_id_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/batch/metadata")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+# fmt: on
+class RunArtifactsOperations(object):
+    """RunArtifactsOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace
+    def list_in_container_by_experiment_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_name,  # type: str
+        continuation_token_parameter=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedArtifactList"]
+        """list_in_container_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the run.
+        :type run_id: str
+        :param experiment_name: The name of the experiment.
+        :type experiment_name: str
+        :param continuation_token_parameter: The continuation token for paging.
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedArtifactList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedArtifactList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedArtifactList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_in_container_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.list_in_container_by_experiment_name.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_in_container_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedArtifactList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list_in_container_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts"}  # type: ignore
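+
+    # Illustrative usage sketch (editor's note, not generated code). The method
+    # above returns an ``ItemPaged`` that lazily follows continuation links, so a
+    # caller can simply iterate it. ``client.run_artifacts`` is an assumed
+    # attribute name for this operation group on the service client.
+    #
+    #   pager = client.run_artifacts.list_in_container_by_experiment_name(
+    #       subscription_id="<subscription-id>",
+    #       resource_group_name="<resource-group>",
+    #       workspace_name="<workspace>",
+    #       run_id="<run-id>",
+    #       experiment_name="<experiment>",
+    #   )
+    #   for artifact in pager:
+    #       ...  # each item is a deserialized Artifact model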
+
+    @distributed_trace
+    def list_in_container_by_experiment_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_id,  # type: str
+        continuation_token_parameter=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedArtifactList"]
+        """list_in_container_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the run.
+        :type run_id: str
+        :param experiment_id: The identifier of the experiment.
+        :type experiment_id: str
+        :param continuation_token_parameter: The continuation token for paging.
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedArtifactList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedArtifactList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedArtifactList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_in_container_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.list_in_container_by_experiment_id.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_in_container_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedArtifactList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list_in_container_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts"}  # type: ignore
+
+    @distributed_trace
+    def list_in_path_by_experiment_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_name,  # type: str
+        path=None,  # type: Optional[str]
+        continuation_token_parameter=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedArtifactList"]
+        """list_in_path_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the run.
+        :type run_id: str
+        :param experiment_name: The name of the experiment.
+        :type experiment_name: str
+        :param path: The artifact path to list under.
+        :type path: str
+        :param continuation_token_parameter: The continuation token for paging.
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedArtifactList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedArtifactList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedArtifactList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_in_path_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.list_in_path_by_experiment_name.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_in_path_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedArtifactList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list_in_path_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/path"}  # type: ignore
+
+    @distributed_trace
+    def list_in_path_by_experiment_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_id,  # type: str
+        path=None,  # type: Optional[str]
+        continuation_token_parameter=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedArtifactList"]
+        """list_in_path_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the run.
+        :type run_id: str
+        :param experiment_id: The identifier of the experiment.
+        :type experiment_id: str
+        :param path: The artifact path to list under.
+        :type path: str
+        :param continuation_token_parameter: The continuation token for paging.
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedArtifactList or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedArtifactList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedArtifactList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_in_path_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.list_in_path_by_experiment_id.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_in_path_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedArtifactList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list_in_path_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/path"}  # type: ignore
+
+    @distributed_trace
+    def get_by_id_by_experiment_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_name,  # type: str
+        path=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Artifact"
+        """get_by_id_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the run.
+        :type run_id: str
+        :param experiment_name: The name of the experiment.
+        :type experiment_name: str
+        :param path: The path of the artifact within the run.
+        :type path: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Artifact, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Artifact
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Artifact"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_by_id_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            path=path,
+            template_url=self.get_by_id_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Artifact', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_by_id_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/metadata"}  # type: ignore
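+
+    # Illustrative usage sketch (editor's note, not generated code). The ``path``
+    # query parameter selects a single artifact; the call returns its metadata as
+    # an ``Artifact`` model. ``client.run_artifacts`` is an assumed attribute name.
+    #
+    #   artifact = client.run_artifacts.get_by_id_by_experiment_name(
+    #       subscription_id="<subscription-id>",
+    #       resource_group_name="<resource-group>",
+    #       workspace_name="<workspace>",
+    #       run_id="<run-id>",
+    #       experiment_name="<experiment>",
+    #       path="outputs/model.pkl",  # hypothetical artifact path
+    #   )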
+
+
+    @distributed_trace
+    def get_by_id_by_experiment_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_id,  # type: str
+        path=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Artifact"
+        """get_by_id_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the run.
+        :type run_id: str
+        :param experiment_id: The identifier of the experiment.
+        :type experiment_id: str
+        :param path: The path of the artifact within the run.
+        :type path: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Artifact, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Artifact
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Artifact"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_by_id_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            path=path,
+            template_url=self.get_by_id_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Artifact', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_by_id_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/metadata"}  # type: ignore
+
+
+    @distributed_trace
+    def get_content_information_by_experiment_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_name,  # type: str
+        path=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.ArtifactContentInformation"
+        """get_content_information_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the run.
+        :type run_id: str
+        :param experiment_name: The name of the experiment.
+        :type experiment_name: str
+        :param path: The path of the artifact.
+        :type path: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ArtifactContentInformation, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.ArtifactContentInformation
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ArtifactContentInformation"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_content_information_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            path=path,
+            template_url=self.get_content_information_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ArtifactContentInformation', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_content_information_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/contentinfo"}  # type: ignore
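+
+    # Editor's note: illustrative sketch of the ``cls`` hook, not generated code.
+    # ``cls`` is called with the raw pipeline response, the deserialized model and
+    # the response headers, so it can be used to keep extra response details:
+    #
+    #     def keep_status(pipeline_response, deserialized, headers):
+    #         return deserialized, pipeline_response.http_response.status_code
+    #
+    #     info, status = client.run_artifacts.get_content_information_by_experiment_name(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         experiment_name="<experiment-name>",
+    #         path="outputs/model.pkl",   # hypothetical path
+    #         cls=keep_status,
+    #     )
+    # The ``run_artifacts`` attribute name on the client is an assumption.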
+
+
+    @distributed_trace
+    def get_content_information_by_experiment_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_id,  # type: str
+        path=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.ArtifactContentInformation"
+        """get_content_information_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the run.
+        :type run_id: str
+        :param experiment_id: The identifier of the experiment.
+        :type experiment_id: str
+        :param path: The path of the artifact.
+        :type path: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ArtifactContentInformation, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.ArtifactContentInformation
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ArtifactContentInformation"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_content_information_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            path=path,
+            template_url=self.get_content_information_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('ArtifactContentInformation', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_content_information_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/contentinfo"}  # type: ignore
+
+
+    @distributed_trace
+    def get_sas_uri_by_experiment_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_name,  # type: str
+        path=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> str
+        """get_sas_uri_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the run.
+        :type run_id: str
+        :param experiment_name: The name of the experiment.
+        :type experiment_name: str
+        :param path: The path of the artifact.
+        :type path: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: str, or the result of cls(response)
+        :rtype: str
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[str]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_sas_uri_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            path=path,
+            template_url=self.get_sas_uri_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('str', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_sas_uri_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/artifacturi"}  # type: ignore
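+
+    # Editor's note: illustrative sketch, not generated code. The operations in
+    # this module accept an ``error_map`` keyword that is merged over the default
+    # 401/404/409 mapping, so a caller could surface 404 as a custom exception:
+    #
+    #     from azure.core.exceptions import ResourceNotFoundError
+    #
+    #     class ArtifactMissingError(ResourceNotFoundError):
+    #         pass
+    #
+    #     sas_uri = client.run_artifacts.get_sas_uri_by_experiment_name(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         experiment_name="<experiment-name>",
+    #         path="outputs/model.pkl",          # hypothetical path
+    #         error_map={404: ArtifactMissingError},
+    #     )
+    # The ``run_artifacts`` attribute name on the client is an assumption.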
+
+
+    @distributed_trace
+    def get_sas_uri_by_experiment_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_id,  # type: str
+        path=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> str
+        """get_sas_uri_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the run.
+        :type run_id: str
+        :param experiment_id: The identifier of the experiment.
+        :type experiment_id: str
+        :param path: The path of the artifact.
+        :type path: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: str, or the result of cls(response)
+        :rtype: str
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[str]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_sas_uri_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            path=path,
+            template_url=self.get_sas_uri_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('str', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_sas_uri_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/artifacturi"}  # type: ignore
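+
+    # Editor's note: illustrative sketch, not generated code. The returned value
+    # is a plain SAS URI string, so the artifact content can be fetched with any
+    # HTTP client, e.g. the standard library:
+    #
+    #     import urllib.request
+    #
+    #     sas_uri = client.run_artifacts.get_sas_uri_by_experiment_id(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         experiment_id="<experiment-id>",
+    #         path="outputs/model.pkl",   # hypothetical path
+    #     )
+    #     with urllib.request.urlopen(sas_uri) as resp:
+    #         data = resp.read()
+    # The ``run_artifacts`` attribute name on the client is an assumption.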
+
+
+    @distributed_trace
+    def list_sas_by_prefix_by_experiment_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_name,  # type: str
+        path=None,  # type: Optional[str]
+        continuation_token_parameter=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedArtifactContentInformationList"]
+        """list_sas_by_prefix_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the run.
+        :type run_id: str
+        :param experiment_name: The name of the experiment.
+        :type experiment_name: str
+        :param path: The artifact path prefix to list under.
+        :type path: str
+        :param continuation_token_parameter: The continuation token for the next page of results.
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either PaginatedArtifactContentInformationList or the
+         result of cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedArtifactContentInformationList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedArtifactContentInformationList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_sas_by_prefix_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.list_sas_by_prefix_by_experiment_name.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_sas_by_prefix_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedArtifactContentInformationList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list_sas_by_prefix_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/prefix/contentinfo"}  # type: ignore
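+
+    # Editor's note: illustrative sketch, not generated code. The method returns
+    # an azure.core.paging.ItemPaged that lazily follows next links; iterate it
+    # once, either item by item or page by page via ``by_page()``:
+    #
+    #     pager = client.run_artifacts.list_sas_by_prefix_by_experiment_name(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         experiment_name="<experiment-name>",
+    #         path="outputs/",            # hypothetical prefix
+    #     )
+    #     for info in pager:              # each item is an ArtifactContentInformation
+    #         print(info.content_uri)     # field name assumed from the model
+    #     # ``pager.by_page()`` would yield whole pages instead of single items.
+    # The ``run_artifacts`` attribute name on the client is an assumption.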
+
+    @distributed_trace
+    def list_sas_by_prefix_by_experiment_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_id,  # type: str
+        path=None,  # type: Optional[str]
+        continuation_token_parameter=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedArtifactContentInformationList"]
+        """list_sas_by_prefix_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the run.
+        :type run_id: str
+        :param experiment_id: The identifier of the experiment.
+        :type experiment_id: str
+        :param path: The artifact path prefix to list under.
+        :type path: str
+        :param continuation_token_parameter: The continuation token for the next page of results.
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either PaginatedArtifactContentInformationList or the
+         result of cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedArtifactContentInformationList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedArtifactContentInformationList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_sas_by_prefix_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.list_sas_by_prefix_by_experiment_id.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_sas_by_prefix_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    path=path,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedArtifactContentInformationList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list_sas_by_prefix_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/prefix/contentinfo"}  # type: ignore
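+
+    # Editor's note: illustrative sketch, not generated code. A continuation
+    # token saved from an earlier listing can be supplied to resume instead of
+    # starting again from the first page:
+    #
+    #     pager = client.run_artifacts.list_sas_by_prefix_by_experiment_id(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         experiment_id="<experiment-id>",
+    #         path="outputs/",                           # hypothetical prefix
+    #         continuation_token_parameter=saved_token,  # token saved from an earlier call
+    #     )
+    # The ``run_artifacts`` attribute name on the client is an assumption.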
+
+    @distributed_trace
+    def batch_create_empty_artifacts_by_experiment_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_name,  # type: str
+        body=None,  # type: Optional["_models.ArtifactPathList"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.BatchArtifactContentInformationResult"
+        """batch_create_empty_artifacts_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the run.
+        :type run_id: str
+        :param experiment_name: The name of the experiment.
+        :type experiment_name: str
+        :param body: The list of artifact paths to create empty artifacts for.
+        :type body: ~azure.mgmt.machinelearningservices.models.ArtifactPathList
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchArtifactContentInformationResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchArtifactContentInformationResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchArtifactContentInformationResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'ArtifactPathList')
+        else:
+            _json = None
+
+        request = build_batch_create_empty_artifacts_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_create_empty_artifacts_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchArtifactContentInformationResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_create_empty_artifacts_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/artifacts/batch/metadata"}  # type: ignore
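+
+    # Editor's note: illustrative sketch, not generated code. The request body is
+    # an ArtifactPathList model; the ``paths``/``path`` field names below are
+    # assumptions based on the model names in this package:
+    #
+    #     from azure.ai.ml._restclient.runhistory import models as _rh_models
+    #
+    #     body = _rh_models.ArtifactPathList(
+    #         paths=[_rh_models.ArtifactPath(path="outputs/metrics.json")]
+    #     )
+    #     result = client.run_artifacts.batch_create_empty_artifacts_by_experiment_name(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         experiment_name="<experiment-name>",
+    #         body=body,
+    #     )
+    # The ``run_artifacts`` attribute name on the client is an assumption.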
+
+
+    @distributed_trace
+    def batch_create_empty_artifacts_by_experiment_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_id,  # type: str
+        body=None,  # type: Optional["_models.ArtifactPathList"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.BatchArtifactContentInformationResult"
+        """batch_create_empty_artifacts_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id: The identifier of the run.
+        :type run_id: str
+        :param experiment_id: The identifier of the experiment.
+        :type experiment_id: str
+        :param body: The list of artifact paths to create empty artifacts for.
+        :type body: ~azure.mgmt.machinelearningservices.models.ArtifactPathList
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchArtifactContentInformationResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchArtifactContentInformationResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchArtifactContentInformationResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'ArtifactPathList')
+        else:
+            _json = None
+
+        request = build_batch_create_empty_artifacts_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_create_empty_artifacts_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchArtifactContentInformationResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_create_empty_artifacts_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/artifacts/batch/metadata"}  # type: ignore
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_run_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_run_operations.py
new file mode 100644
index 00000000..c8591bff
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_run_operations.py
@@ -0,0 +1,233 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+
+from msrest import Serializer
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models as _models
+from .._vendor import _convert_request, _format_url_section
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any, Callable, Dict, Iterable, List, Optional, TypeVar, Union
+    T = TypeVar('T')
+    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+# fmt: off
+
+def build_list_by_compute_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    compute_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    filter = kwargs.pop('filter', None)  # type: Optional[str]
+    continuationtoken = kwargs.pop('continuationtoken', None)  # type: Optional[str]
+    orderby = kwargs.pop('orderby', None)  # type: Optional[List[str]]
+    sortorder = kwargs.pop('sortorder', None)  # type: Optional[Union[str, "_models.SortOrderDirection"]]
+    top = kwargs.pop('top', None)  # type: Optional[int]
+    count = kwargs.pop('count', None)  # type: Optional[bool]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/runs")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "computeName": _SERIALIZER.url("compute_name", compute_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if filter is not None:
+        _query_parameters['$filter'] = _SERIALIZER.query("filter", filter, 'str')
+    if continuationtoken is not None:
+        _query_parameters['$continuationtoken'] = _SERIALIZER.query("continuationtoken", continuationtoken, 'str')
+    if orderby is not None:
+        _query_parameters['$orderby'] = _SERIALIZER.query("orderby", orderby, '[str]')
+    if sortorder is not None:
+        _query_parameters['$sortorder'] = _SERIALIZER.query("sortorder", sortorder, 'str')
+    if top is not None:
+        _query_parameters['$top'] = _SERIALIZER.query("top", top, 'int')
+    if count is not None:
+        _query_parameters['$count'] = _SERIALIZER.query("count", count, 'bool')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+# fmt: on
+class RunOperations(object):
+    """RunOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace
+    def list_by_compute(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        compute_name,  # type: str
+        filter=None,  # type: Optional[str]
+        continuationtoken=None,  # type: Optional[str]
+        orderby=None,  # type: Optional[List[str]]
+        sortorder=None,  # type: Optional[Union[str, "_models.SortOrderDirection"]]
+        top=None,  # type: Optional[int]
+        count=None,  # type: Optional[bool]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedRunList"]
+        """list_by_compute.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param compute_name: The name of the compute target.
+        :type compute_name: str
+        :param filter: Allows for filtering the collection of resources.
+         The expression specified is evaluated for each resource in the collection, and only items
+         where the expression evaluates to true are included in the response.
+        :type filter: str
+        :param continuationtoken: The continuation token to use for getting the next set of resources.
+        :type continuationtoken: str
+        :param orderby: The list of resource properties to use for sorting the requested resources.
+        :type orderby: list[str]
+        :param sortorder: The sort order of the returned resources. Not used; specify asc or desc
+         after each property name in the orderby parameter instead.
+        :type sortorder: str or ~azure.mgmt.machinelearningservices.models.SortOrderDirection
+        :param top: The maximum number of items in the resource collection to be included in the
+         result.
+         If not specified, all items are returned.
+        :type top: int
+        :param count: Whether to include a count of the matching resources along with the resources
+         returned in the response.
+        :type count: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either PaginatedRunList or the result of cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedRunList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedRunList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_by_compute_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    compute_name=compute_name,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=self.list_by_compute.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_by_compute_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    compute_name=compute_name,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedRunList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list_by_compute.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/runs"}  # type: ignore
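+
+# Editor's note: illustrative sketch, not generated code. ``list_by_compute``
+# pages through all runs on a compute target; the OData-style options are plain
+# keyword arguments:
+#
+#     runs = client.run.list_by_compute(
+#         subscription_id="<subscription-id>",
+#         resource_group_name="<resource-group>",
+#         workspace_name="<workspace>",
+#         compute_name="<compute-name>",
+#         filter="status eq 'Completed'",   # hypothetical $filter expression
+#         orderby=["createdUtc desc"],      # hypothetical property name
+#         top=50,
+#     )
+#     for run in runs:
+#         print(run.run_id)                 # field name assumed from the Run model
+#
+# The ``run`` operation-group attribute name on the client is an assumption.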
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_runs_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_runs_operations.py
new file mode 100644
index 00000000..a95f5652
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_runs_operations.py
@@ -0,0 +1,3972 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+
+from msrest import Serializer
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models as _models
+from .._vendor import _convert_request, _format_url_section
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any, Callable, Dict, Iterable, List, Optional, TypeVar, Union
+    T = TypeVar('T')
+    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+# fmt: off
+
+def build_get_child_by_experiment_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    filter = kwargs.pop('filter', None)  # type: Optional[str]
+    continuationtoken = kwargs.pop('continuationtoken', None)  # type: Optional[str]
+    orderby = kwargs.pop('orderby', None)  # type: Optional[List[str]]
+    sortorder = kwargs.pop('sortorder', None)  # type: Optional[Union[str, "_models.SortOrderDirection"]]
+    top = kwargs.pop('top', None)  # type: Optional[int]
+    count = kwargs.pop('count', None)  # type: Optional[bool]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/children")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if filter is not None:
+        _query_parameters['$filter'] = _SERIALIZER.query("filter", filter, 'str')
+    if continuationtoken is not None:
+        _query_parameters['$continuationtoken'] = _SERIALIZER.query("continuationtoken", continuationtoken, 'str')
+    if orderby is not None:
+        _query_parameters['$orderby'] = _SERIALIZER.query("orderby", orderby, '[str]')
+    if sortorder is not None:
+        _query_parameters['$sortorder'] = _SERIALIZER.query("sortorder", sortorder, 'str')
+    if top is not None:
+        _query_parameters['$top'] = _SERIALIZER.query("top", top, 'int')
+    if count is not None:
+        _query_parameters['$count'] = _SERIALIZER.query("count", count, 'bool')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
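+
+# Editor's note: illustrative sketch, not generated code. The request builders
+# can be called directly to inspect the HttpRequest an operation would send;
+# query options are passed as keyword arguments:
+#
+#     req = build_get_child_by_experiment_name_request(
+#         subscription_id="<subscription-id>",
+#         resource_group_name="<resource-group>",
+#         workspace_name="<workspace>",
+#         run_id="<run-id>",
+#         experiment_name="<experiment-name>",
+#         top=10,
+#         filter="status eq 'Running'",   # hypothetical $filter expression
+#     )
+#     print(req.method, req.url)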
+
+
+def build_get_child_by_experiment_id_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    filter = kwargs.pop('filter', None)  # type: Optional[str]
+    continuationtoken = kwargs.pop('continuationtoken', None)  # type: Optional[str]
+    orderby = kwargs.pop('orderby', None)  # type: Optional[List[str]]
+    sortorder = kwargs.pop('sortorder', None)  # type: Optional[Union[str, "_models.SortOrderDirection"]]
+    top = kwargs.pop('top', None)  # type: Optional[int]
+    count = kwargs.pop('count', None)  # type: Optional[bool]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/children")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if filter is not None:
+        _query_parameters['$filter'] = _SERIALIZER.query("filter", filter, 'str')
+    if continuationtoken is not None:
+        _query_parameters['$continuationtoken'] = _SERIALIZER.query("continuationtoken", continuationtoken, 'str')
+    if orderby is not None:
+        _query_parameters['$orderby'] = _SERIALIZER.query("orderby", orderby, '[str]')
+    if sortorder is not None:
+        _query_parameters['$sortorder'] = _SERIALIZER.query("sortorder", sortorder, 'str')
+    if top is not None:
+        _query_parameters['$top'] = _SERIALIZER.query("top", top, 'int')
+    if count is not None:
+        _query_parameters['$count'] = _SERIALIZER.query("count", count, 'bool')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_child_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    filter = kwargs.pop('filter', None)  # type: Optional[str]
+    continuationtoken = kwargs.pop('continuationtoken', None)  # type: Optional[str]
+    orderby = kwargs.pop('orderby', None)  # type: Optional[List[str]]
+    sortorder = kwargs.pop('sortorder', None)  # type: Optional[Union[str, "_models.SortOrderDirection"]]
+    top = kwargs.pop('top', None)  # type: Optional[int]
+    count = kwargs.pop('count', None)  # type: Optional[bool]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/children")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if filter is not None:
+        _query_parameters['$filter'] = _SERIALIZER.query("filter", filter, 'str')
+    if continuationtoken is not None:
+        _query_parameters['$continuationtoken'] = _SERIALIZER.query("continuationtoken", continuationtoken, 'str')
+    if orderby is not None:
+        _query_parameters['$orderby'] = _SERIALIZER.query("orderby", orderby, '[str]')
+    if sortorder is not None:
+        _query_parameters['$sortorder'] = _SERIALIZER.query("sortorder", sortorder, 'str')
+    if top is not None:
+        _query_parameters['$top'] = _SERIALIZER.query("top", top, 'int')
+    if count is not None:
+        _query_parameters['$count'] = _SERIALIZER.query("count", count, 'bool')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_details_by_experiment_id_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/details")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_details_by_experiment_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/details")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_details_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/details")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_run_data_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/rundata")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
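+
+# Editor's note: illustrative sketch, not generated code. This builder issues a
+# POST to the /rundata endpoint; a JSON body can be forwarded through ``**kwargs``
+# to azure.core.rest.HttpRequest, e.g.:
+#
+#     req = build_get_run_data_request(
+#         subscription_id="<subscription-id>",
+#         resource_group_name="<resource-group>",
+#         workspace_name="<workspace>",
+#         content_type="application/json",
+#         json={"runId": "<run-id>"},   # hypothetical body shape
+#     )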
+
+
+def build_batch_get_run_data_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchrundata")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_batch_add_or_modify_by_experiment_id_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/batch/runs")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="PATCH",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_batch_add_or_modify_by_experiment_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/batch/runs")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="PATCH",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_add_or_modify_by_experiment_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str', pattern=r'^[a-zA-Z0-9][\w-]{0,255}$'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="PATCH",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_by_experiment_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_add_or_modify_by_experiment_id_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str', pattern=r'^[a-zA-Z0-9][\w-]{0,255}$'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="PATCH",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_by_experiment_id_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_add_or_modify_experiment_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str', pattern=r'^[a-zA-Z0-9][\w-]{0,255}$'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="PATCH",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_add_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str', pattern=r'^[a-zA-Z0-9][\w-]{0,255}$'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
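+
+
+# Editorial note, not generated code: the builders for routes that create or modify a run
+# (build_add_request above, build_add_or_modify_experiment_request, and the by-experiment
+# variants) pass pattern=r'^[a-zA-Z0-9][\w-]{0,255}$' to _SERIALIZER.url, so a run_id that
+# does not match is rejected client-side by the serializer before any request is sent.
+# The read-only builders, such as build_get_request below, do not apply this pattern.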
+
+
+def build_get_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
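+
+
+# Illustrative sketch only, not part of the generated client: how a module-level request
+# builder is used. The identifier values below are hypothetical placeholders.
+def _example_build_get_request():  # pragma: no cover
+    _request = build_get_request(
+        subscription_id="00000000-0000-0000-0000-000000000000",  # hypothetical
+        resource_group_name="my-resource-group",  # hypothetical
+        workspace_name="my-workspace",  # hypothetical
+        run_id="my-run-id",  # hypothetical
+    )
+    # build_get_request returns an azure.core HttpRequest: method "GET", the path
+    # parameters substituted into the template URL, and an Accept header of
+    # "application/json". RunsOperations below dispatches such requests through the
+    # client's pipeline rather than sending them directly.
+    return _request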
+
+
+def build_delete_tags_by_experiment_id_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/tags")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="DELETE",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_modify_or_delete_tags_by_experiment_id_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/tags")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="PATCH",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_delete_tags_by_experiment_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/tags")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="DELETE",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_modify_or_delete_tags_by_experiment_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/tags")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="PATCH",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_delete_tags_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/tags")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="DELETE",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_delete_run_services_by_experiment_id_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/services")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="DELETE",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_delete_run_services_by_experiment_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/services")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="DELETE",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_delete_run_services_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/services")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="DELETE",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_add_or_modify_run_service_instances_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    node_id,  # type: int
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/serviceinstances/{nodeId}")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "nodeId": _SERIALIZER.url("node_id", node_id, 'int'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="PATCH",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_run_service_instances_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    node_id,  # type: int
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/serviceinstances/{nodeId}")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "nodeId": _SERIALIZER.url("node_id", node_id, 'int'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_by_query_by_experiment_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs:query")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_by_query_by_experiment_id_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs:query")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_by_ids_by_experiment_id_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/runIds")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_by_ids_by_experiment_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/runIds")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_cancel_run_with_uri_by_experiment_id_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    cancelation_reason = kwargs.pop('cancelation_reason', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/cancel")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentId": _SERIALIZER.url("experiment_id", experiment_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if cancelation_reason is not None:
+        _query_parameters['CancelationReason'] = _SERIALIZER.query("cancelation_reason", cancelation_reason, 'str')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_cancel_run_with_uri_by_experiment_name_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    experiment_name,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    cancelation_reason = kwargs.pop('cancelation_reason', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/cancel")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+        "experimentName": _SERIALIZER.url("experiment_name", experiment_name, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if cancelation_reason is not None:
+        _query_parameters['CancelationReason'] = _SERIALIZER.query("cancelation_reason", cancelation_reason, 'str')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
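+
+# Editorial note, not generated code: unlike the other builders in this module, the two
+# cancel builders above also populate the query string -- the optional cancelation_reason
+# keyword is serialized as the "CancelationReason" query parameter and handed to
+# HttpRequest via params, alongside the usual path formatting and Accept header.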
+
+# fmt: on
+class RunsOperations(object):  # pylint: disable=too-many-public-methods
+    """RunsOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace
+    def get_child_by_experiment_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_name,  # type: str
+        filter=None,  # type: Optional[str]
+        continuationtoken=None,  # type: Optional[str]
+        orderby=None,  # type: Optional[List[str]]
+        sortorder=None,  # type: Optional[Union[str, "_models.SortOrderDirection"]]
+        top=None,  # type: Optional[int]
+        count=None,  # type: Optional[bool]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedRunList"]
+        """get_child_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param filter: Allows for filtering the collection of resources.
+         The expression specified is evaluated for each resource in the collection, and only items
+         where the expression evaluates to true are included in the response.
+        :type filter: str
+        :param continuationtoken: The continuation token to use for getting the next set of resources.
+        :type continuationtoken: str
+        :param orderby: The list of resource properties to use for sorting the requested resources.
+        :type orderby: list[str]
+        :param sortorder: The sort order of the returned resources. Not used; specify asc or desc
+         after each property name in the OrderBy parameter instead.
+        :type sortorder: str or ~azure.mgmt.machinelearningservices.models.SortOrderDirection
+        :param top: The maximum number of items in the resource collection to be included in the
+         result.
+         If not specified, all items are returned.
+        :type top: int
+        :param count: Whether to include a count of the matching resources along with the resources
+         returned in the response.
+        :type count: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either PaginatedRunList or the result of cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedRunList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedRunList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_get_child_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=self.get_child_by_experiment_name.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_get_child_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_name=experiment_name,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedRunList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    get_child_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/children"}  # type: ignore
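+
+    # Illustrative usage sketch, not generated code. Assuming ``runs_ops`` is a
+    # RunsOperations instance obtained from an already-configured service client
+    # (which supplies the pipeline, serializer and deserializer), the call returns an
+    # ItemPaged that lazily follows continuation links; all argument values shown are
+    # hypothetical placeholders:
+    #
+    #     pager = runs_ops.get_child_by_experiment_name(
+    #         subscription_id="...",
+    #         resource_group_name="my-rg",
+    #         workspace_name="my-ws",
+    #         run_id="parent-run-id",
+    #         experiment_name="my-experiment",
+    #         top=50,
+    #     )
+    #     for child_run in pager:  # each item is a Run model from the paged list
+    #         print(child_run)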
+
+    @distributed_trace
+    def get_child_by_experiment_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_id,  # type: str
+        filter=None,  # type: Optional[str]
+        continuationtoken=None,  # type: Optional[str]
+        orderby=None,  # type: Optional[List[str]]
+        sortorder=None,  # type: Optional[Union[str, "_models.SortOrderDirection"]]
+        top=None,  # type: Optional[int]
+        count=None,  # type: Optional[bool]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedRunList"]
+        """get_child_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param filter: Allows for filtering the collection of resources.
+         The expression specified is evaluated for each resource in the collection, and only items
+         where the expression evaluates to true are included in the response.
+        :type filter: str
+        :param continuationtoken: The continuation token to use for getting the next set of resources.
+        :type continuationtoken: str
+        :param orderby: The list of resource properties to use for sorting the requested resources.
+        :type orderby: list[str]
+        :param sortorder: The sort order of the returned resources. Not used; specify asc or desc
+         after each property name in the OrderBy parameter instead.
+        :type sortorder: str or ~azure.mgmt.machinelearningservices.models.SortOrderDirection
+        :param top: The maximum number of items in the resource collection to be included in the
+         result.
+         If not specified, all items are returned.
+        :type top: int
+        :param count: Whether to include a count of the matching resources along with the resources
+         returned in the response.
+        :type count: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either PaginatedRunList or the result of cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedRunList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedRunList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_get_child_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=self.get_child_by_experiment_id.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_get_child_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    experiment_id=experiment_id,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedRunList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    get_child_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/children"}  # type: ignore
+
+    @distributed_trace
+    def get_child(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        filter=None,  # type: Optional[str]
+        continuationtoken=None,  # type: Optional[str]
+        orderby=None,  # type: Optional[List[str]]
+        sortorder=None,  # type: Optional[Union[str, "_models.SortOrderDirection"]]
+        top=None,  # type: Optional[int]
+        count=None,  # type: Optional[bool]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedRunList"]
+        """get_child.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param filter: Allows for filtering the collection of resources.
+         The expression specified is evaluated for each resource in the collection, and only items
+         where the expression evaluates to true are included in the response.
+        :type filter: str
+        :param continuationtoken: The continuation token to use for getting the next set of resources.
+        :type continuationtoken: str
+        :param orderby: The list of resource properties to use for sorting the requested resources.
+        :type orderby: list[str]
+        :param sortorder: The sort order of the returned resources. Not used; specify asc or desc
+         after each property name in the OrderBy parameter instead.
+        :type sortorder: str or ~azure.mgmt.machinelearningservices.models.SortOrderDirection
+        :param top: The maximum number of items in the resource collection to be included in the
+         result.
+         If not specified, all items are returned.
+        :type top: int
+        :param count: Whether to include a count of the matching resources along with the resources
+         returned in the response.
+        :type count: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either PaginatedRunList or the result of cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedRunList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedRunList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_get_child_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=self.get_child.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_get_child_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    filter=filter,
+                    continuationtoken=continuationtoken,
+                    orderby=orderby,
+                    sortorder=sortorder,
+                    top=top,
+                    count=count,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedRunList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    get_child.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/children"}  # type: ignore
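+
+    # Illustrative usage sketch (not part of the generated code). It assumes `client` is an
+    # instance of the generated AzureMachineLearningWorkspaces service client and that this
+    # operations group is exposed as `client.runs` (the attribute name is an assumption):
+    #
+    #     children = client.runs.get_child(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<parent-run-id>",
+    #         top=50,
+    #     )
+    #     for child_run in children:  # ItemPaged follows continuation tokens automatically
+    #         print(child_run)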
+
+    @distributed_trace
+    def get_details_by_experiment_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.RunDetails"
+        """get_details_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: RunDetails, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.RunDetails
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RunDetails"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_details_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            template_url=self.get_details_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('RunDetails', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_details_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/details"}  # type: ignore
+
+
+    @distributed_trace
+    def get_details_by_experiment_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.RunDetails"
+        """get_details_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: RunDetails, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.RunDetails
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RunDetails"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_details_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            template_url=self.get_details_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('RunDetails', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_details_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/details"}  # type: ignore
+
+
+    @distributed_trace
+    def get_details(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.RunDetails"
+        """get_details.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: RunDetails, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.RunDetails
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RunDetails"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_details_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            template_url=self.get_details.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('RunDetails', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_details.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/details"}  # type: ignore
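+
+    # Illustrative usage sketch (not part of the generated code); `client.runs` is an assumed
+    # attribute name for this operations group on the generated service client:
+    #
+    #     details = client.runs.get_details(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #     )
+    #     # `details` is a deserialized RunDetails model. Pass
+    #     # cls=lambda pipeline_response, deserialized, headers: (pipeline_response, deserialized)
+    #     # to also receive the raw pipeline response alongside the model.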
+
+
+    @distributed_trace
+    def get_run_data(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body=None,  # type: Optional["_models.GetRunDataRequest"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.GetRunDataResult"
+        """get_run_data.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.GetRunDataRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: GetRunDataResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.GetRunDataResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.GetRunDataResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'GetRunDataRequest')
+        else:
+            _json = None
+
+        request = build_get_run_data_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.get_run_data.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('GetRunDataResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_run_data.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/rundata"}  # type: ignore
+
+
+    @distributed_trace
+    def batch_get_run_data(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        body=None,  # type: Optional["_models.BatchRequest1"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.BatchResult1"
+        """batch_get_run_data.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchRequest1
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchResult1, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchResult1
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchResult1"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchRequest1')
+        else:
+            _json = None
+
+        request = build_batch_get_run_data_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_get_run_data.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 207]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
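+        # A 207 (Multi-Status) response signals partial success for the batch; its payload
+        # deserializes to the same BatchResult1 model as a plain 200 response.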
+        if response.status_code in [200, 207]:
+            deserialized = self._deserialize('BatchResult1', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_get_run_data.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchrundata"}  # type: ignore
+
+
+    @distributed_trace
+    def batch_add_or_modify_by_experiment_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        experiment_id,  # type: str
+        body=None,  # type: Optional["_models.BatchAddOrModifyRunRequest"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.BatchRunResult"
+        """batch_add_or_modify_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchAddOrModifyRunRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchRunResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchRunResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchRunResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchAddOrModifyRunRequest')
+        else:
+            _json = None
+
+        request = build_batch_add_or_modify_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_add_or_modify_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchRunResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_add_or_modify_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/batch/runs"}  # type: ignore
+
+
+    @distributed_trace
+    def batch_add_or_modify_by_experiment_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        experiment_name,  # type: str
+        body=None,  # type: Optional["_models.BatchAddOrModifyRunRequest"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.BatchRunResult"
+        """batch_add_or_modify_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.BatchAddOrModifyRunRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchRunResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchRunResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchRunResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'BatchAddOrModifyRunRequest')
+        else:
+            _json = None
+
+        request = build_batch_add_or_modify_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.batch_add_or_modify_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchRunResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    batch_add_or_modify_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/batch/runs"}  # type: ignore
+
+
+    @distributed_trace
+    def add_or_modify_by_experiment_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_name,  # type: str
+        body=None,  # type: Optional["_models.CreateRun"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Run"
+        """add_or_modify_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.CreateRun
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'CreateRun')
+        else:
+            _json = None
+
+        request = build_add_or_modify_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.add_or_modify_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    add_or_modify_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}"}  # type: ignore
+
+
+    @distributed_trace
+    def get_by_experiment_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Run"
+        """get_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            template_url=self.get_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}"}  # type: ignore
+
+
+    @distributed_trace
+    def add_or_modify_by_experiment_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_id,  # type: str
+        body=None,  # type: Optional["_models.CreateRun"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Run"
+        """add_or_modify_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.CreateRun
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'CreateRun')
+        else:
+            _json = None
+
+        request = build_add_or_modify_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.add_or_modify_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    add_or_modify_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}"}  # type: ignore
+
+
+    @distributed_trace
+    def get_by_experiment_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Run"
+        """get_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            template_url=self.get_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}"}  # type: ignore
+
+
+    @distributed_trace
+    def add_or_modify_experiment(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        body=None,  # type: Optional["_models.CreateRun"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Run"
+        """add_or_modify_experiment.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.CreateRun
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'CreateRun')
+        else:
+            _json = None
+
+        request = build_add_or_modify_experiment_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.add_or_modify_experiment.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    add_or_modify_experiment.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}"}  # type: ignore
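+
+    # Illustrative usage sketch (not part of the generated code); `client.runs` and the empty
+    # CreateRun payload are assumptions; consult _models.CreateRun for the actual attributes:
+    #
+    #     run = client.runs.add_or_modify_experiment(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         body=_models.CreateRun(),  # populate optional fields before sending
+    #     )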
+
+
+    @distributed_trace
+    def add(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        body=None,  # type: Optional["_models.CreateRun"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Run"
+        """add.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.CreateRun
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'CreateRun')
+        else:
+            _json = None
+
+        request = build_add_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.add.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    add.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}"}  # type: ignore
+
+
+    @distributed_trace
+    def get(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Run"
+        """get.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            template_url=self.get.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}"}  # type: ignore
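+
+    # Illustrative error-handling sketch (not part of the generated code); `client.runs` is an
+    # assumed attribute name for this operations group:
+    #
+    #     from azure.core.exceptions import HttpResponseError
+    #     try:
+    #         run = client.runs.get(
+    #             subscription_id="<subscription-id>",
+    #             resource_group_name="<resource-group>",
+    #             workspace_name="<workspace>",
+    #             run_id="<run-id>",
+    #         )
+    #     except HttpResponseError as exc:
+    #         # Non-200 responses raise HttpResponseError with the deserialized ErrorResponse
+    #         # attached as `exc.model`.
+    #         print(exc.model)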
+
+
+    @distributed_trace
+    def delete_tags_by_experiment_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_id,  # type: str
+        body=None,  # type: Optional[List[str]]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Run"
+        """delete_tags_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: list[str]
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, '[str]')
+        else:
+            _json = None
+
+        request = build_delete_tags_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.delete_tags_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    delete_tags_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/tags"}  # type: ignore
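+
+    # Illustrative usage sketch (not part of the generated code); `client.runs` is an assumed
+    # attribute name. The body is simply a list of tag names to remove from the run:
+    #
+    #     run = client.runs.delete_tags_by_experiment_id(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         experiment_id="<experiment-id>",
+    #         body=["outdated-tag", "temporary-tag"],
+    #     )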
+
+
+    @distributed_trace
+    def modify_or_delete_tags_by_experiment_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_id,  # type: str
+        body=None,  # type: Optional["_models.DeleteOrModifyTags"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Run"
+        """modify_or_delete_tags_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DeleteOrModifyTags
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DeleteOrModifyTags')
+        else:
+            _json = None
+
+        request = build_modify_or_delete_tags_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.modify_or_delete_tags_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    modify_or_delete_tags_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/tags"}  # type: ignore
+
+
+    @distributed_trace
+    def delete_tags_by_experiment_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_name,  # type: str
+        body=None,  # type: Optional[List[str]]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Run"
+        """delete_tags_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: list[str]
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, '[str]')
+        else:
+            _json = None
+
+        request = build_delete_tags_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.delete_tags_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    delete_tags_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/tags"}  # type: ignore
+
+
+    @distributed_trace
+    def modify_or_delete_tags_by_experiment_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_name,  # type: str
+        body=None,  # type: Optional["_models.DeleteOrModifyTags"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Run"
+        """modify_or_delete_tags_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DeleteOrModifyTags
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DeleteOrModifyTags')
+        else:
+            _json = None
+
+        request = build_modify_or_delete_tags_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.modify_or_delete_tags_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    modify_or_delete_tags_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/tags"}  # type: ignore
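+
+    # Illustrative usage sketch (not part of the generated client). `runs_ops` is
+    # assumed to be this operations group on a configured service client; the
+    # DeleteOrModifyTags body is constructed empty here and would be populated
+    # according to that model's definition:
+    #
+    #     run = runs_ops.modify_or_delete_tags_by_experiment_name(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         experiment_name="<experiment-name>",
+    #         body=_models.DeleteOrModifyTags(),
+    #     )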
+
+
+    @distributed_trace
+    def delete_tags(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        body=None,  # type: Optional[List[str]]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Run"
+        """delete_tags.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: list[str]
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, '[str]')
+        else:
+            _json = None
+
+        request = build_delete_tags_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.delete_tags.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    delete_tags.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/tags"}  # type: ignore
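+
+    # Illustrative usage sketch (not part of the generated client). `runs_ops` is
+    # assumed to be this operations group on a configured service client; the tag
+    # names and resource identifiers are placeholders:
+    #
+    #     run = runs_ops.delete_tags(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         body=["stale", "temp"],  # tag keys to remove
+    #     )
+    #     print(run.status)  # assumes the Run model exposes a `status` attribute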
+
+
+    @distributed_trace
+    def delete_run_services_by_experiment_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_id,  # type: str
+        body=None,  # type: Optional["_models.DeleteRunServices"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Run"
+        """delete_run_services_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DeleteRunServices
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DeleteRunServices')
+        else:
+            _json = None
+
+        request = build_delete_run_services_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.delete_run_services_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    delete_run_services_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/services"}  # type: ignore
+
+
+    @distributed_trace
+    def delete_run_services_by_experiment_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_name,  # type: str
+        body=None,  # type: Optional["_models.DeleteRunServices"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Run"
+        """delete_run_services_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DeleteRunServices
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DeleteRunServices')
+        else:
+            _json = None
+
+        request = build_delete_run_services_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.delete_run_services_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    delete_run_services_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/services"}  # type: ignore
+
+
+    @distributed_trace
+    def delete_run_services(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        body=None,  # type: Optional["_models.DeleteRunServices"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Run"
+        """delete_run_services.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.DeleteRunServices
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'DeleteRunServices')
+        else:
+            _json = None
+
+        request = build_delete_run_services_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.delete_run_services.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    delete_run_services.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/services"}  # type: ignore
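+
+    # Illustrative usage sketch (not part of the generated client). `runs_ops` is a
+    # placeholder for this operations group; the DeleteRunServices body is left
+    # empty here and would be populated per that model's definition:
+    #
+    #     run = runs_ops.delete_run_services(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         body=_models.DeleteRunServices(),
+    #     )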
+
+
+    @distributed_trace
+    def add_or_modify_run_service_instances(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        node_id,  # type: int
+        body=None,  # type: Optional["_models.AddOrModifyRunServiceInstancesRequest"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.RunServiceInstances"
+        """add_or_modify_run_service_instances.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param node_id:
+        :type node_id: int
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.AddOrModifyRunServiceInstancesRequest
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: RunServiceInstances, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.RunServiceInstances
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RunServiceInstances"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'AddOrModifyRunServiceInstancesRequest')
+        else:
+            _json = None
+
+        request = build_add_or_modify_run_service_instances_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            node_id=node_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.add_or_modify_run_service_instances.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('RunServiceInstances', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    add_or_modify_run_service_instances.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/serviceinstances/{nodeId}"}  # type: ignore
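+
+    # Illustrative usage sketch (not part of the generated client). Note that
+    # node_id is an integer path parameter; the request body is left to be built
+    # from the AddOrModifyRunServiceInstancesRequest model:
+    #
+    #     instances = runs_ops.add_or_modify_run_service_instances(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         node_id=0,
+    #         body=_models.AddOrModifyRunServiceInstancesRequest(),
+    #     )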
+
+
+    @distributed_trace
+    def get_run_service_instances(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        node_id,  # type: int
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.RunServiceInstances"
+        """get_run_service_instances.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param node_id:
+        :type node_id: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: RunServiceInstances, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.RunServiceInstances
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RunServiceInstances"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_get_run_service_instances_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            node_id=node_id,
+            template_url=self.get_run_service_instances.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('RunServiceInstances', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_run_service_instances.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/serviceinstances/{nodeId}"}  # type: ignore
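+
+    # Illustrative usage sketch (not part of the generated client): a plain GET
+    # that deserializes into RunServiceInstances; identifiers are placeholders:
+    #
+    #     instances = runs_ops.get_run_service_instances(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         node_id=0,
+    #     )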
+
+
+    @distributed_trace
+    def get_by_query_by_experiment_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        experiment_name,  # type: str
+        body=None,  # type: Optional["_models.QueryParams"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedRunList"]
+        """get_by_query_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.QueryParams
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedRunList or the result of cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedRunList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedRunList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                if body is not None:
+                    _json = self._serialize.body(body, 'QueryParams')
+                else:
+                    _json = None
+                
+                request = build_get_by_query_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    experiment_name=experiment_name,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=self.get_by_query_by_experiment_name.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                if body is not None:
+                    _json = self._serialize.body(body, 'QueryParams')
+                else:
+                    _json = None
+                
+                request = build_get_by_query_by_experiment_name_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    experiment_name=experiment_name,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedRunList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    get_by_query_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs:query"}  # type: ignore
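+
+    # Illustrative usage sketch (not part of the generated client). The method
+    # returns an ItemPaged of Run objects, so results are consumed by iteration;
+    # passing body=None queries with the service defaults:
+    #
+    #     for run in runs_ops.get_by_query_by_experiment_name(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         experiment_name="<experiment-name>",
+    #         body=None,
+    #     ):
+    #         print(run.run_id)  # assumes the Run model exposes `run_id`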
+
+    @distributed_trace
+    def get_by_query_by_experiment_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        experiment_id,  # type: str
+        body=None,  # type: Optional["_models.QueryParams"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedRunList"]
+        """get_by_query_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.QueryParams
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedRunList or the result of cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedRunList]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedRunList"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                if body is not None:
+                    _json = self._serialize.body(body, 'QueryParams')
+                else:
+                    _json = None
+                
+                request = build_get_by_query_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    experiment_id=experiment_id,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=self.get_by_query_by_experiment_id.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                if body is not None:
+                    _json = self._serialize.body(body, 'QueryParams')
+                else:
+                    _json = None
+                
+                request = build_get_by_query_by_experiment_id_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    experiment_id=experiment_id,
+                    content_type=content_type,
+                    json=_json,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedRunList", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    get_by_query_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs:query"}  # type: ignore
+
+    @distributed_trace
+    def get_by_ids_by_experiment_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        experiment_id,  # type: str
+        body=None,  # type: Optional["_models.GetRunsByIds"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.BatchRunResult"
+        """get_by_ids_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.GetRunsByIds
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchRunResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchRunResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchRunResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'GetRunsByIds')
+        else:
+            _json = None
+
+        request = build_get_by_ids_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_id=experiment_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.get_by_ids_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchRunResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_by_ids_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/runIds"}  # type: ignore
+
+
+    @distributed_trace
+    def get_by_ids_by_experiment_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        experiment_name,  # type: str
+        body=None,  # type: Optional["_models.GetRunsByIds"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.BatchRunResult"
+        """get_by_ids_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.GetRunsByIds
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BatchRunResult, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.BatchRunResult
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BatchRunResult"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'GetRunsByIds')
+        else:
+            _json = None
+
+        request = build_get_by_ids_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            experiment_name=experiment_name,
+            content_type=content_type,
+            json=_json,
+            template_url=self.get_by_ids_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('BatchRunResult', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    get_by_ids_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/runIds"}  # type: ignore
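+
+    # Illustrative usage sketch (not part of the generated client). The body is a
+    # GetRunsByIds model whose field names are not shown in this file, so it is
+    # left to be populated per that model's definition:
+    #
+    #     batch = runs_ops.get_by_ids_by_experiment_name(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         experiment_name="<experiment-name>",
+    #         body=_models.GetRunsByIds(),
+    #     )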
+
+
+    @distributed_trace
+    def cancel_run_with_uri_by_experiment_id(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_id,  # type: str
+        cancelation_reason=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Run"
+        """cancel_run_with_uri_by_experiment_id.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_id:
+        :type experiment_id: str
+        :param cancelation_reason:
+        :type cancelation_reason: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_cancel_run_with_uri_by_experiment_id_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_id=experiment_id,
+            cancelation_reason=cancelation_reason,
+            template_url=self.cancel_run_with_uri_by_experiment_id.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    cancel_run_with_uri_by_experiment_id.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experimentids/{experimentId}/runs/{runId}/cancel"}  # type: ignore
+
+
+    @distributed_trace
+    def cancel_run_with_uri_by_experiment_name(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        experiment_name,  # type: str
+        cancelation_reason=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Run"
+        """cancel_run_with_uri_by_experiment_name.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param experiment_name:
+        :type experiment_name: str
+        :param cancelation_reason:
+        :type cancelation_reason: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Run, or the result of cls(response)
+        :rtype: ~azure.mgmt.machinelearningservices.models.Run
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Run"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        
+        request = build_cancel_run_with_uri_by_experiment_name_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            experiment_name=experiment_name,
+            cancelation_reason=cancelation_reason,
+            template_url=self.cancel_run_with_uri_by_experiment_name.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('Run', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    cancel_run_with_uri_by_experiment_name.metadata = {'url': "/history/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/experiments/{experimentName}/runs/{runId}/cancel"}  # type: ignore
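+
+    # Illustrative usage sketch (not part of the generated client). The
+    # cancellation reason is an optional plain string forwarded to the request
+    # builder; identifiers are placeholders:
+    #
+    #     cancelled = runs_ops.cancel_run_with_uri_by_experiment_name(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         experiment_name="<experiment-name>",
+    #         cancelation_reason="superseded by a newer run",
+    #     )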
+
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_spans_operations.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_spans_operations.py
new file mode 100644
index 00000000..7245f5ed
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/operations/_spans_operations.py
@@ -0,0 +1,429 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+
+from msrest import Serializer
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models as _models
+from .._vendor import _convert_request, _format_url_section
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any, Callable, Dict, Iterable, Optional, TypeVar
+    T = TypeVar('T')
+    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+# fmt: off
+
+def build_post_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/private/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/spans")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    if content_type is not None:
+        _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="POST",
+        url=_url,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_list_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    continuation_token_parameter = kwargs.pop('continuation_token_parameter', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/private/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/spans")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if continuation_token_parameter is not None:
+        _query_parameters['continuationToken'] = _SERIALIZER.query("continuation_token_parameter", continuation_token_parameter, 'str')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+
+def build_get_active_request(
+    subscription_id,  # type: str
+    resource_group_name,  # type: str
+    workspace_name,  # type: str
+    run_id,  # type: str
+    **kwargs  # type: Any
+):
+    # type: (...) -> HttpRequest
+    continuation_token_parameter = kwargs.pop('continuation_token_parameter', None)  # type: Optional[str]
+
+    accept = "application/json"
+    # Construct URL
+    _url = kwargs.pop("template_url", "/history/v1.0/private/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/spans/active")  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
+        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str'),
+        "runId": _SERIALIZER.url("run_id", run_id, 'str'),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
+    if continuation_token_parameter is not None:
+        _query_parameters['continuationToken'] = _SERIALIZER.query("continuation_token_parameter", continuation_token_parameter, 'str')
+
+    # Construct headers
+    _header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
+    _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
+
+    return HttpRequest(
+        method="GET",
+        url=_url,
+        params=_query_parameters,
+        headers=_header_parameters,
+        **kwargs
+    )
+
+# fmt: on
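+# Illustrative note (not generated code): the build_*_request helpers above only
+# assemble an azure.core.rest.HttpRequest (method, URL, query string, headers);
+# they do not send anything. A sketch of building the list request directly,
+# with placeholder identifiers:
+#
+#     req = build_list_request(
+#         subscription_id="<subscription-id>",
+#         resource_group_name="<resource-group>",
+#         workspace_name="<workspace>",
+#         run_id="<run-id>",
+#         continuation_token_parameter=None,
+#     )
+#     # req.method == "GET"; req.url is still relative until the operations class
+#     # resolves it against the client's base URL via self._client.format_url(...).
+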
+class SpansOperations(object):
+    """SpansOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.mgmt.machinelearningservices.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    @distributed_trace
+    def post(  # pylint: disable=inconsistent-return-statements
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        body=None,  # type: Optional["_models.RunStatusSpans"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """post.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param body:
+        :type body: ~azure.mgmt.machinelearningservices.models.RunStatusSpans
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+
+        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
+
+        if body is not None:
+            _json = self._serialize.body(body, 'RunStatusSpans')
+        else:
+            _json = None
+
+        request = build_post_request(
+            subscription_id=subscription_id,
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            run_id=run_id,
+            content_type=content_type,
+            json=_json,
+            template_url=self.post.metadata['url'],
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)
+
+        pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+            request,
+            stream=False,
+            **kwargs
+        )
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:  # 200 assumed as the success status
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    post.metadata = {'url': "/history/v1.0/private/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/spans"}  # type: ignore
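+
+    # Illustrative usage sketch (not part of the generated client). `spans_ops` is
+    # assumed to be this operations group on a configured client; the call returns
+    # None on success, and the RunStatusSpans body is left to be populated per
+    # that model's definition:
+    #
+    #     spans_ops.post(
+    #         subscription_id="<subscription-id>",
+    #         resource_group_name="<resource-group>",
+    #         workspace_name="<workspace>",
+    #         run_id="<run-id>",
+    #         body=_models.RunStatusSpans(),
+    #     )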
+
+
+    @distributed_trace
+    def list(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        continuation_token_parameter=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedSpanDefinition1List"]
+        """list.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedSpanDefinition1List or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedSpanDefinition1List]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedSpanDefinition1List"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        def prepare_request(next_link=None):
+            if not next_link:
+                
+                request = build_list_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.list.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+                
+                request = build_list_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
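+        # extract_data: deserialize one page into PaginatedSpanDefinition1List; 'value'
+        # carries the span items and 'next_link' the URL of the next page (None at the end).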
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedSpanDefinition1List", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
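+        # get_next: send the page request through the client pipeline and surface
+        # non-200 responses as HttpResponseError carrying the deserialized ErrorResponse.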
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': "/history/v1.0/private/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/spans"}  # type: ignore
+
+    @distributed_trace
+    def get_active(
+        self,
+        subscription_id,  # type: str
+        resource_group_name,  # type: str
+        workspace_name,  # type: str
+        run_id,  # type: str
+        continuation_token_parameter=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PaginatedSpanDefinition1List"]
+        """get_active.
+
+        :param subscription_id: The Azure Subscription ID.
+        :type subscription_id: str
+        :param resource_group_name: The Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param run_id:
+        :type run_id: str
+        :param continuation_token_parameter:
+        :type continuation_token_parameter: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PaginatedSpanDefinition1List or the result of
+         cls(response)
+        :rtype:
+         ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedSpanDefinition1List]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PaginatedSpanDefinition1List"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
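+        # Same paging pattern as list() above, except requests are built with
+        # build_get_active_request, which targets the run's /spans/active endpoint.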
+        def prepare_request(next_link=None):
+            if not next_link:
+
+                request = build_get_active_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=self.get_active.metadata['url'],
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+
+            else:
+
+                request = build_get_active_request(
+                    subscription_id=subscription_id,
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    run_id=run_id,
+                    continuation_token_parameter=continuation_token_parameter,
+                    template_url=next_link,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)
+                request.method = "GET"
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize("PaginatedSpanDefinition1List", pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(  # pylint: disable=protected-access
+                request,
+                stream=False,
+                **kwargs
+            )
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+            return pipeline_response
+
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    get_active.metadata = {'url': "/history/v1.0/private/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/runs/{runId}/spans/active"}  # type: ignore
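
For orientation, a minimal usage sketch of the paged operations added above (not part of the generated file). It assumes `spans_ops` is an instance of the spans operations class defined in this module, obtained however the caller normally builds the generated run-history client; all identifier values are placeholders.

    # Hypothetical consumption sketch; `spans_ops` and every identifier below are
    # placeholders, not values taken from this diff.
    pager = spans_ops.list(
        subscription_id="<subscription-id>",
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        run_id="<run-id>",
    )
    for span in pager:  # ItemPaged fetches pages lazily, following next_link
        print(span)

    # by_page() on azure.core.paging.ItemPaged yields whole pages instead of items.
    for page in spans_ops.get_active(
        subscription_id="<subscription-id>",
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        run_id="<run-id>",
    ).by_page():
        for span in page:
            print(span)
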
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/py.typed b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/py.typed
new file mode 100644
index 00000000..e5aff4f8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_restclient/runhistory/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561.
\ No newline at end of file