about summary refs log tree commit diff
path: root/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints
diff options
context:
space:
mode:
authorS. Solomon Darnell2025-03-28 21:52:21 -0500
committerS. Solomon Darnell2025-03-28 21:52:21 -0500
commit4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
treeee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints
parentcc961e04ba734dd72309fb548a2f97d67d578813 (diff)
downloadgn-ai-master.tar.gz
two versions of R2R are here HEAD master
Diffstat (limited to '.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints')
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/__init__.py19
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/azureml_image_context.py132
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/docker_client.py568
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/dockerfile_instructions.py86
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/dockerfile_resolver.py156
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/endpoint_stub.py127
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/local_endpoint_mode.py12
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/mdc_config_resolver.py87
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/utilities/__init__.py3
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/utilities/commandline_utility.py111
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/utilities/wsl_utility.py35
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/validators/__init__.py11
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/validators/code_validator.py114
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/validators/environment_validator.py202
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/validators/model_validator.py93
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/vscode_debug/__init__.py3
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/vscode_debug/devcontainer_properties.py156
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/vscode_debug/devcontainer_resolver.py190
-rw-r--r--.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/vscode_debug/vscode_client.py51
19 files changed, 2156 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/__init__.py
new file mode 100644
index 00000000..4b51cd22
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/__init__.py
@@ -0,0 +1,19 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
+
+from .azureml_image_context import AzureMlImageContext
+from .docker_client import DockerClient
+from .dockerfile_resolver import DockerfileResolver
+from .endpoint_stub import EndpointStub
+from .local_endpoint_mode import LocalEndpointMode
+
+__all__ = [
+    "AzureMlImageContext",
+    "DockerClient",
+    "DockerfileResolver",
+    "EndpointStub",
+    "LocalEndpointMode",
+]
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/azureml_image_context.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/azureml_image_context.py
new file mode 100644
index 00000000..1740cdf0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/azureml_image_context.py
@@ -0,0 +1,132 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+
+import logging
+import os
+from pathlib import Path
+from typing import Dict
+
+from azure.ai.ml.constants._endpoint import LocalEndpointConstants
+
+module_logger = logging.getLogger(__name__)
+
+
class AzureMlImageContext(object):
    """Entity holding context for building the Azure ML specific image.

    Collects the docker volumes, environment variables, and port mapping needed
    to run a local endpoint container for a single deployment.

    :param endpoint_name: the name of the online endpoint
    :type endpoint_name: str
    :param deployment_name: the name of the deployment under online endpoint
    :type deployment_name: str
    :param yaml_code_directory_path: Local directory of user code files.
        Originates in endpoint yaml configuration and is parsed by Endpoint schema.
        Should be absolute path.
    :type yaml_code_directory_path: str
    :param yaml_code_scoring_script_file_name: File name of scoring script from endpoint yaml configuration.
    :type yaml_code_scoring_script_file_name: str
    :param model_directory_path: Path of model directory to be mounted. Should be absolute path.
    :type model_directory_path: str
    :param model_mount_path: Sub-path under ``azureml-models/`` inside the container
        where the model directory is mounted. Defaults to "".
    :type model_mount_path: str
    """

    def __init__(
        self,
        endpoint_name: str,  # pylint: disable=unused-argument
        deployment_name: str,  # pylint: disable=unused-argument
        yaml_code_directory_path: str,
        yaml_code_scoring_script_file_name: str,
        model_directory_path: str,
        model_mount_path: str = "",
    ):
        """Constructor for AzureMlImageContext.

        :param endpoint_name: the name of the online endpoint (currently unused)
        :type endpoint_name: str
        :param deployment_name: the name of the deployment under online endpoint (currently unused)
        :type deployment_name: str
        :param yaml_code_directory_path: Local directory of user code files.
            Originates in endpoint yaml configuration and is parsed by Endpoint schema.
            Should be absolute path.
        :type yaml_code_directory_path: str
        :param yaml_code_scoring_script_file_name: File name of scoring script from endpoint yaml configuration.
        :type yaml_code_scoring_script_file_name: str
        :param model_directory_path: Path of model directory to be mounted. Should be absolute path.
        :type model_directory_path: str
        :param model_mount_path: Sub-path under ``azureml-models/`` for the model mount. Defaults to "".
        :type model_mount_path: str
        :return: AzureMlImageContext
        """
        self._docker_azureml_app_path = LocalEndpointConstants.AZUREML_APP_PATH

        local_model_mount_path = str(model_directory_path)
        docker_azureml_model_dir = f"{self.docker_azureml_app_path}azureml-models/{model_mount_path}"
        # Volumes use a custom "src:dest[:mode]"-keyed dict format; the keys
        # themselves are what get passed to docker (see DockerClient._reformat_volumes).
        self._volumes = {
            f"{local_model_mount_path}:{docker_azureml_model_dir}:z": {
                local_model_mount_path: {"bind": docker_azureml_model_dir}
            },
        }
        self._environment = {
            LocalEndpointConstants.ENVVAR_KEY_AZUREML_MODEL_DIR: docker_azureml_model_dir,
            # ie. /var/azureml-app/azureml-models/
            LocalEndpointConstants.ENVVAR_KEY_AZUREML_INFERENCE_PYTHON_PATH: LocalEndpointConstants.CONDA_ENV_BIN_PATH,
        }

        if yaml_code_directory_path:
            local_code_mount_path = str(yaml_code_directory_path)
            docker_code_folder_name = Path(yaml_code_directory_path).name
            docker_code_mount_path = f"{self.docker_azureml_app_path}{docker_code_folder_name}/"
            self._volumes.update(
                {
                    f"{local_code_mount_path}:{docker_code_mount_path}": {
                        local_code_mount_path: {"bind": docker_code_mount_path}
                    }
                }
            )
            # Set the directory containing scoring script as AML_APP_ROOT/working directory
            # ie. /var/azureml-app/onlinescoring
            self._environment[LocalEndpointConstants.ENVVAR_KEY_AML_APP_ROOT] = os.path.join(
                docker_code_mount_path, os.path.dirname(yaml_code_scoring_script_file_name)
            )
            self._environment[LocalEndpointConstants.ENVVAR_KEY_AZUREML_ENTRY_SCRIPT] = Path(
                yaml_code_scoring_script_file_name
            ).name  # ie. score.py

        # Default container-port -> host-port mapping for the inference server.
        self.ports = {"5001/tcp": 5001}

    @property
    def docker_azureml_app_path(self) -> str:
        """Returns the app path inside the local endpoint container.

        :return: The app path
        :rtype: str
        """
        return self._docker_azureml_app_path

    @property
    def docker_conda_file_name(self) -> str:
        """Returns the name of the conda file to copy into docker image.

        NOTE(review): ``_docker_conda_file_name`` is never assigned anywhere in
        this class, so accessing this property raises AttributeError unless the
        attribute is set externally — confirm intended usage.

        :return: The conda file name
        :rtype: str
        """
        # pylint: disable=no-member
        return self._docker_conda_file_name  # type: ignore[attr-defined]

    @property
    def volumes(self) -> Dict[str, Dict[str, Dict[str, str]]]:
        """Returns the volumes to mount when running the Azure ML Image locally.

        :return: The dict of volumes
        :rtype: Dict[str, Dict[str, Dict[str, str]]]
        """
        return self._volumes

    @property
    def environment(self) -> Dict[str, str]:
        """Returns the environment variables to set when running the Azure ML Image locally.

        :return: A dict of environment variable names to values
        :rtype: Dict[str, str]
        """
        return self._environment
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/docker_client.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/docker_client.py
new file mode 100644
index 00000000..beb9db71
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/docker_client.py
@@ -0,0 +1,568 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs
+# pylint: disable=client-accepts-api-version-keyword
+
+import json
+import logging
+import time
+from typing import Dict, List, Optional
+
+from azure.ai.ml._local_endpoints.local_endpoint_mode import LocalEndpointMode
+from azure.ai.ml._local_endpoints.vscode_debug.vscode_client import VSCodeClient
+from azure.ai.ml._utils._logger_utils import initialize_logger_info
+from azure.ai.ml._utils.utils import DockerProxy
+from azure.ai.ml.constants._endpoint import LocalEndpointConstants
+from azure.ai.ml.exceptions import (
+    DockerEngineNotAvailableError,
+    InvalidLocalEndpointError,
+    LocalEndpointImageBuildError,
+    LocalEndpointInFailedStateError,
+    LocalEndpointNotFoundError,
+    MultipleLocalDeploymentsFoundError,
+)
+
# Proxy object for docker-py — presumably defers the actual import until first
# attribute access (see DockerProxy); confirm against _utils.utils.
docker = DockerProxy()
module_logger = logging.getLogger(__name__)
initialize_logger_info(module_logger, terminator="")

# Baseline label set stamped on every local-endpoint container. Values are
# filled in per-container; the azureml-local-endpoint label also serves as the
# filter key used by DockerClient.list_containers.
DEFAULT_LABELS: Dict = {
    LocalEndpointConstants.LABEL_KEY_AZUREML_LOCAL_ENDPOINT: "",
    LocalEndpointConstants.LABEL_KEY_ENDPOINT_NAME: "",
    LocalEndpointConstants.LABEL_KEY_DEPLOYMENT_NAME: "",
    LocalEndpointConstants.LABEL_KEY_ENDPOINT_JSON: "",
    LocalEndpointConstants.LABEL_KEY_DEPLOYMENT_JSON: "",
    LocalEndpointConstants.LABEL_KEY_AZUREML_PORT: "",
}
+
+
class DockerClient(object):
    """Client for interacting with User's Docker environment for local
    endpoints."""

    # pylint: disable=client-method-missing-type-annotations

    def __init__(
        self,
        client: Optional["docker.DockerClient"] = None,  # type: ignore[name-defined]
        vscode_client: Optional[VSCodeClient] = None,
    ):
        """Constructor for DockerClient.

        :param client: Pre-constructed docker-py client. If None, one is created
            lazily from the user's environment on first use.
        :type client: (docker.DockerClient, optional)
        :param vscode_client: Client used for VS Code dev container interactions.
        :type vscode_client: (VSCodeClient, optional)
        """
        self._lazy_client = client
        self._vscode_client = vscode_client if vscode_client else VSCodeClient()

    @property
    def _client(self) -> "docker.DockerClient":  # type: ignore[name-defined]
        """Lazy initializer for docker-py client.

        :return: docker.client.DockerClient
        :raises: azure.ai.ml._local_endpoints.errors.DockerEngineNotAvailableError
        """
        if self._lazy_client is None:
            try:
                self._lazy_client = docker.from_env()
            except docker.errors.DockerException as e:
                # docker-py raises a generic DockerException when the daemon is
                # unreachable; detect that case by message and translate it.
                if "Error while fetching server API version" in str(e):
                    raise DockerEngineNotAvailableError() from e
                raise
        return self._lazy_client

    def create_endpoint(
        self,
        endpoint_name: str,
        endpoint_metadata: str,
        build_directory: str,
        image_name: str,
        dockerfile_path: str,
    ) -> None:
        """Builds an image and starts a container for a local endpoint (no deployment).

        :param endpoint_name: name of local endpoint
        :type endpoint_name: str
        :param endpoint_metadata: Endpoint entity information serialized.
        :type endpoint_metadata: str
        :param build_directory: directory on user's local system used as the docker build context
        :type build_directory: str
        :param image_name: tag to apply to the built image
        :type image_name: str
        :param dockerfile_path: path of the Dockerfile to build
        :type dockerfile_path: str
        """
        try:
            self._client.images.build(path=build_directory, tag=image_name, dockerfile=dockerfile_path)
        except docker.errors.BuildError:
            # NOTE(review): build failures are deliberately swallowed here; a
            # missing or stale image surfaces when containers.run is called
            # below. Consider raising LocalEndpointImageBuildError instead.
            pass
        # Remove any pre-existing container for this endpoint before re-creating it.
        self.delete(endpoint_name=endpoint_name, verify_exists=False)

        labels = DEFAULT_LABELS.copy()
        labels[LocalEndpointConstants.LABEL_KEY_ENDPOINT_NAME] = endpoint_name
        labels[LocalEndpointConstants.LABEL_KEY_ENDPOINT_JSON] = endpoint_metadata
        container_name = _get_container_name(endpoint_name)
        self._client.containers.run(
            image_name,
            name=container_name,
            labels=labels,
            detach=True,
            tty=True,
            publish_all_ports=True,
        )

    # pylint: disable=client-method-has-more-than-5-positional-arguments

    def create_deployment(
        self,
        endpoint_name: str,
        deployment_name: str,
        endpoint_metadata: str,
        deployment_metadata: str,
        build_directory: str,
        dockerfile_path: str,
        conda_source_path: str,
        conda_yaml_contents: str,
        volumes: dict,
        environment: dict,
        azureml_port: int,
        local_endpoint_mode: LocalEndpointMode,
        prebuilt_image_name: Optional[str] = None,
        local_enable_gpu: Optional[bool] = False,
    ) -> None:
        """Builds and runs an image from provided image context.

        :param endpoint_name: name of local endpoint
        :type endpoint_name: str
        :param deployment_name: name of local deployment
        :type deployment_name: str
        :param endpoint_metadata: Endpoint entity information serialized.
        :type endpoint_metadata: str
        :param deployment_metadata: Deployment entity information serialized.
        :type deployment_metadata: str
        :param build_directory: directory on user's local system to write conda file
        :type build_directory: str
        :param dockerfile_path: directory on user's local system to write Dockerfile
        :type dockerfile_path: str
        :param conda_source_path: source of conda file (either path on user's local machine or environment ID)
        :type conda_source_path: str
        :param conda_yaml_contents: contents of user's conda file for docker build
        :type conda_yaml_contents: str
        :param volumes: dictionary of volumes to mount to docker container
        :type volumes: dict
        :param environment: dictionary of docker environment variables to set in container
        :type environment: dict
        :param azureml_port: Port exposed in Docker image for AzureML service.
        :type azureml_port: int
        :param local_endpoint_mode: Mode for how to create the local user container.
        :type local_endpoint_mode: LocalEndpointMode
        :param prebuilt_image_name: Name of pre-built image from customer if using BYOC flow.
        :type prebuilt_image_name: str
        :param local_enable_gpu: enable local container to access gpu
        :type local_enable_gpu: bool
        """
        # Prepare image
        if prebuilt_image_name is None:
            image_name = _get_image_name(endpoint_name, deployment_name)
            module_logger.debug("Building local image '%s'\n", image_name)
            module_logger.debug("Build directory: '%s'\n", build_directory)
            module_logger.debug("Dockerfile path: '%s'\n", dockerfile_path)
            self._build_image(
                build_directory=build_directory,
                image_name=image_name,
                dockerfile_path=dockerfile_path,
                conda_source_path=conda_source_path,
                conda_yaml_contents=conda_yaml_contents,
            )
            # Fix: log build completion after the build actually runs
            # (original logged it before calling _build_image).
            module_logger.debug("Image '%s' is built.", image_name)
        else:
            image_name = prebuilt_image_name
            try:
                self._client.images.get(image_name)
            except docker.errors.ImageNotFound:
                module_logger.info("\nDid not find image '%s' locally. Pulling from registry.\n", image_name)
                try:
                    self._client.images.pull(image_name)
                except docker.errors.NotFound as e:
                    raise InvalidLocalEndpointError(
                        message=(
                            f"Could not find image '{image_name}' locally or in registry. "
                            "Please check your image name."
                        ),
                        no_personal_data_message=(
                            "Could not find image locally or in registry. Please check your image name."
                        ),
                    ) from e

        module_logger.info("\nStarting up endpoint")
        # Delete container if exists
        self.delete(endpoint_name=endpoint_name, verify_exists=False)

        labels = get_container_labels(
            endpoint_name=endpoint_name,
            deployment_name=deployment_name,
            endpoint_metadata=endpoint_metadata,  # type: ignore[arg-type]
            deployment_metadata=deployment_metadata,  # type: ignore[arg-type]
            azureml_port=azureml_port,
        )
        module_logger.debug("Setting labels: '%s'\n", labels)
        module_logger.debug("Mounting volumes: '%s'\n", volumes)
        module_logger.debug("Setting environment variables: '%s'\n", environment)
        container_name = _get_container_name(endpoint_name, deployment_name)
        # count=-1 requests all available GPUs from the docker daemon.
        device_requests = [docker.types.DeviceRequest(count=-1, capabilities=[["gpu"]])] if local_enable_gpu else None
        container = self._client.containers.create(
            image_name,
            name=container_name,
            labels=labels,
            volumes=self._reformat_volumes(volumes),
            environment=environment,
            detach=True,
            tty=True,
            publish_all_ports=True,
            device_requests=device_requests,
        )
        if local_endpoint_mode == LocalEndpointMode.VSCodeDevContainer:
            try:
                devcontainer_path = self._vscode_client.create_dev_container_json(
                    azureml_container=container,
                    endpoint_name=endpoint_name,
                    deployment_name=deployment_name,
                    build_directory=build_directory,
                    image_name=image_name,
                    environment=environment,
                    volumes=volumes,  # type: ignore[arg-type]
                    labels=labels,
                )
            finally:
                # This pre-created container is only used for retrieving the entry script
                # to add debugpy statements
                container.remove()
            app_path = environment[LocalEndpointConstants.ENVVAR_KEY_AML_APP_ROOT]
            self._vscode_client.invoke_dev_container(devcontainer_path=devcontainer_path, app_path=app_path)
            time.sleep(LocalEndpointConstants.DEFAULT_STARTUP_WAIT_TIME_SECONDS)
        else:
            container.start()
            time.sleep(LocalEndpointConstants.DEFAULT_STARTUP_WAIT_TIME_SECONDS)
            # Refresh container attributes (status/ports) before validating.
            container.reload()
            _validate_container_state(
                endpoint_name=endpoint_name,
                deployment_name=deployment_name,
                container=container,
            )
            scoring_uri = self.get_scoring_uri(endpoint_name=endpoint_name, deployment_name=deployment_name)
            module_logger.debug("Container '%s' is up and running at '%s'\n", container_name, scoring_uri)

    def delete(
        self,
        endpoint_name: str,
        deployment_name: Optional[str] = None,
        verify_exists: bool = True,
    ) -> None:
        """Deletes local endpoint / deployment.

        :param endpoint_name: name of local endpoint
        :type endpoint_name: str
        :param deployment_name: name of local deployment
        :type deployment_name: (str, optional)
        :param verify_exists: Verify that the endpoint exists on deletion. Default: True
        :type verify_exists: (bool, optional)
        :raises: azure.ai.ml._local_endpoints.errors.LocalEndpointNotFoundError
        """
        containers = self.list_containers(endpoint_name=endpoint_name, deployment_name=deployment_name)
        if verify_exists and len(containers) == 0:
            raise LocalEndpointNotFoundError(endpoint_name=endpoint_name, deployment_name=deployment_name)

        for container in containers:
            container.stop()
            container.remove()
            module_logger.debug("Endpoint container '%s' is removed.", container.name)

    def get_endpoint(self, endpoint_name: str) -> Optional[dict]:
        """Returns metadata for local endpoint or deployment.

        :param endpoint_name: name of local endpoint
        :type endpoint_name: str
        :returns: JSON dict representing user provided endpoint input
        :rtype: dict
        :raises: azure.ai.ml._local_endpoints.errors.LocalEndpointNotFoundError
        """
        container = self.get_endpoint_container(endpoint_name=endpoint_name)
        if container is None:
            raise LocalEndpointNotFoundError(endpoint_name=endpoint_name)
        return get_endpoint_json_from_container(container=container)

    def get_deployment(self, endpoint_name: str, deployment_name: Optional[str] = None) -> Optional[dict]:
        """Returns metadata for local deployment.

        :param endpoint_name: name of local endpoint
        :type endpoint_name: str
        :param deployment_name: name of local deployment
        :type deployment_name: (str, optional)
        :return: JSON dict representing user provided endpoint input
        :rtype: dict
        :raises: azure.ai.ml._local_endpoints.errors.LocalEndpointNotFoundError
        """
        container = self.get_endpoint_container(endpoint_name=endpoint_name, deployment_name=deployment_name)
        if container is None:
            raise LocalEndpointNotFoundError(endpoint_name=endpoint_name, deployment_name=deployment_name)
        return get_deployment_json_from_container(container=container)

    def get_scoring_uri(self, endpoint_name: str, deployment_name: Optional[str] = None) -> Optional[str]:
        """Returns scoring uri for local endpoint or deployment.

        :param endpoint_name: name of local endpoint
        :type endpoint_name: str
        :param deployment_name: name of local deployment
        :type deployment_name: (str, optional)
        :return: The scoring uri, or None if no matching container exists.
        :rtype: Optional[str]
        :raises: azure.ai.ml._local_endpoints.errors.LocalEndpointInFailedStateError
        :raises: azure.ai.ml._local_endpoints.errors.MultipleLocalDeploymentsFoundError
        """
        container = self.get_endpoint_container(
            endpoint_name=endpoint_name,
            deployment_name=deployment_name,
            verify_single_deployment=True,
        )
        if container is None:
            return None
        _validate_container_state(
            endpoint_name=endpoint_name,
            deployment_name=str(deployment_name),
            container=container,
        )
        return get_scoring_uri_from_container(container=container)

    def logs(self, endpoint_name: str, deployment_name: str, lines: int) -> str:
        """Returns logs from local deployment.

        :param endpoint_name: name of local endpoint
        :type endpoint_name: str
        :param deployment_name: name of local deployment
        :type deployment_name: str
        :param lines: number of lines to retrieve from container logs
        :type lines: int
        :return: Deployment logs
        :rtype: str
        :raises: azure.ai.ml._local_endpoints.errors.LocalEndpointNotFoundError
        """
        container = self.get_endpoint_container(endpoint_name, deployment_name=deployment_name)
        if container is None:
            raise LocalEndpointNotFoundError(endpoint_name=endpoint_name, deployment_name=deployment_name)
        return container.logs(tail=int(lines)).decode()

    def list_containers(
        self,
        endpoint_name: Optional[str] = None,
        deployment_name: Optional[str] = None,
        include_stopped: bool = True,
    ) -> list:
        """Returns a list of local endpoints.

        :param endpoint_name: Name of local endpoint. If none, all local endpoints will be returned.
        :type endpoint_name: (str, optional)
        :param deployment_name: Name of local deployment. If none, all deployments under endpoint will be returned.
        :type deployment_name: (str, optional)
        :param include_stopped: Include stopped containers. Default: True.
        :type include_stopped: (bool, optional)
        :return: array of Container objects from docker-py library
        :rtype: List[docker.models.containers.Container]
        """
        # All local-endpoint containers carry the base label; endpoint/deployment
        # names narrow the filter further.
        filters = {"label": [f"{LocalEndpointConstants.LABEL_KEY_AZUREML_LOCAL_ENDPOINT}"]}
        if endpoint_name:
            filters["label"].append(f"{LocalEndpointConstants.LABEL_KEY_ENDPOINT_NAME}={endpoint_name}")
        if deployment_name:
            filters["label"].append(f"{LocalEndpointConstants.LABEL_KEY_DEPLOYMENT_NAME}={deployment_name}")

        return self._client.containers.list(filters=filters, all=include_stopped)

    def get_endpoint_container(
        self,
        endpoint_name: str,
        deployment_name: Optional[str] = None,
        verify_single_deployment: bool = False,
        include_stopped: bool = True,
    ) -> Optional["docker.models.containers.Container"]:  # type: ignore[name-defined]
        """Returns the container for a local endpoint / deployment, if any.

        :param endpoint_name: name of local endpoint
        :type endpoint_name: str
        :param deployment_name: name of local deployment
        :type deployment_name: (str, optional)
        :param verify_single_deployment: Fail if more than one deployment container exists
        :type verify_single_deployment: (bool, optional)
        :param include_stopped: Include container even if it's stopped. Default: True.
        :type include_stopped: (bool, optional)
        :returns: The docker container, or None if none match
        :rtype: Optional[docker.models.containers.Container]
        :raises: azure.ai.ml._local_endpoints.errors.MultipleLocalDeploymentsFoundError
        """
        containers = self.list_containers(
            endpoint_name=endpoint_name,
            deployment_name=deployment_name,
            include_stopped=include_stopped,
        )
        if len(containers) == 0:
            return None
        if len(containers) > 1 and verify_single_deployment:
            raise MultipleLocalDeploymentsFoundError(endpoint_name=endpoint_name)
        return containers[0]

    def _build_image(
        self,
        build_directory: str,
        image_name: str,
        dockerfile_path: str,
        conda_source_path: str,  # pylint: disable=unused-argument
        conda_yaml_contents: str,  # pylint: disable=unused-argument
    ) -> None:
        """Builds the docker image, streaming build output to the logger.

        :param build_directory: docker build context directory
        :type build_directory: str
        :param image_name: tag to apply to the built image
        :type image_name: str
        :param dockerfile_path: path of the Dockerfile to build
        :type dockerfile_path: str
        :raises: azure.ai.ml.exceptions.LocalEndpointImageBuildError
        """
        try:
            module_logger.info("\nBuilding Docker image from Dockerfile")
            first_line = True
            for status in self._client.api.build(
                path=build_directory,
                tag=image_name,
                dockerfile=dockerfile_path,
                pull=True,
                decode=True,
                quiet=False,
            ):
                if first_line:
                    module_logger.info("\n")
                    first_line = False
                if "stream" in status:
                    # Conda failures inside the build are reported in the
                    # stream rather than as an "error" entry; detect by message.
                    if "An unexpected error has occurred. Conda has prepared the above report." in status["stream"]:
                        raise LocalEndpointImageBuildError(status["stream"])
                    module_logger.info(status["stream"])

                if "error" in status:
                    module_logger.info(status["error"])
                    raise LocalEndpointImageBuildError(status["error"])
        except docker.errors.APIError as e:
            raise LocalEndpointImageBuildError(e) from e
        except LocalEndpointImageBuildError:
            # Re-raise our own error unchanged (simplified from the original
            # isinstance check inside a catch-all handler).
            raise
        except Exception as e:
            raise LocalEndpointImageBuildError(e) from e

    def _reformat_volumes(self, volumes_dict: Dict[str, Dict[str, Dict[str, str]]]) -> List[str]:
        """Returns a list of volumes to pass to docker.

        :param volumes_dict: custom formatted dict of volumes to mount. We expect the keys to be unique. Example:
            .. code-block:: python

                {
                    "codesrc:codedest": {
                        "codesrc": {
                            "bind": "codedest"
                        }
                    },
                    "modelsrc:modeldest": {
                        "modelsrc": {
                            "bind": "modeldest"
                        }
                    }
                }

        :type volumes_dict: Dict[str, Dict[str, Dict[str, str]]]
        :return: list of volumes to pass to docker. Example:
            .. code-block:: python

                ["codesrc:codedest", "modelsrc:modeldest"]
        :rtype: List[str]
        """
        # The "src:dest[:mode]" keys are already in docker's volume syntax.
        return list(volumes_dict.keys())
+
+
def get_container_labels(
    endpoint_name: str,
    deployment_name: str,
    endpoint_metadata: dict,
    deployment_metadata: dict,
    azureml_port: int,
) -> dict:
    """Builds the full docker label set identifying a local deployment container.

    :param endpoint_name: name of local endpoint
    :type endpoint_name: str
    :param deployment_name: name of local deployment
    :type deployment_name: str
    :param endpoint_metadata: serialized endpoint entity
    :type endpoint_metadata: dict
    :param deployment_metadata: serialized deployment entity
    :type deployment_metadata: dict
    :param azureml_port: port exposed for the AzureML service
    :type azureml_port: int
    :return: label dict to stamp on the container
    :rtype: dict
    """
    labels = dict(DEFAULT_LABELS)
    labels.update(
        {
            LocalEndpointConstants.LABEL_KEY_ENDPOINT_NAME: endpoint_name,
            LocalEndpointConstants.LABEL_KEY_DEPLOYMENT_NAME: deployment_name,
            LocalEndpointConstants.LABEL_KEY_ENDPOINT_JSON: endpoint_metadata,
            LocalEndpointConstants.LABEL_KEY_DEPLOYMENT_JSON: deployment_metadata,
            LocalEndpointConstants.LABEL_KEY_AZUREML_PORT: str(azureml_port),
        }
    )
    return labels
+
+
def get_endpoint_json_from_container(
    container: "docker.models.containers.Container",  # type: ignore[name-defined]
) -> Optional[dict]:
    """Deserializes the endpoint JSON stored in the container's labels.

    :param container: container of local endpoint
    :type container: docker.models.containers.Container
    :return: endpoint metadata dict, or None if no container was given
    :rtype: Optional[dict]
    """
    if not container:
        return None
    return json.loads(container.labels[LocalEndpointConstants.LABEL_KEY_ENDPOINT_JSON])
+
+
def get_deployment_json_from_container(
    container: "docker.models.containers.Container",  # type: ignore[name-defined]
) -> Optional[dict]:
    """Deserializes the deployment JSON stored in the container's labels.

    :param container: container of local deployment
    :type container: docker.models.containers.Container
    :return: deployment metadata dict, or None if no container was given
    :rtype: Optional[dict]
    """
    if not container:
        return None
    return json.loads(container.labels[LocalEndpointConstants.LABEL_KEY_DEPLOYMENT_JSON])
+
+
def get_status_from_container(container: "docker.models.containers.Container") -> str:  # type: ignore[name-defined]
    """Returns status of container.

    :param container: container of local Deployment
    :type container: docker.models.containers.Container
    :return: container status
    :rtype: str
    """
    current_status: str = container.status
    return current_status
+
+
def get_scoring_uri_from_container(
    container: "docker.models.containers.Container",  # type: ignore[name-defined]
) -> str:
    """Returns scoring_uri of container.

    :param container: container of local Deployment
    :type container: docker.models.containers.Container
    :return: container scoring_uri
    :rtype: str
    """
    # Fall back to the default AzureML inference port when no binding is found.
    host_port = 5001
    # Example container.ports: {'5001/tcp': [{'HostIp': '0.0.0.0', 'HostPort': '5001'}],
    # '8883/tcp': None, '8888/tcp': None }
    if container is not None and container.ports is not None:
        azureml_port = container.labels["azureml-port"]
        for exposed_port, host_bindings in container.ports.items():
            if azureml_port not in exposed_port or host_bindings is None:
                continue
            for binding in host_bindings:
                if "HostPort" in binding:
                    host_port = binding["HostPort"]
                    break
    # TODO: resolve scoring path correctly
    return f"http://localhost:{host_port}/score"
+
+
+def _get_image_name(endpoint_name: str, deployment_name: str) -> str:
+    """Returns an image name.
+
+    :param endpoint_name: name of local endpoint
+    :type endpoint_name: str
+    :param deployment_name: name of local deployment
+    :type deployment_name: str
+    :return: image name
+    :rtype: str
+    """
+    return f"{endpoint_name}:{deployment_name}"
+
+
+def _get_container_name(endpoint_name: str, deployment_name: Optional[str] = None) -> str:
+    """Returns a container name.
+
+    :param endpoint_name: name of local endpoint
+    :type endpoint_name: str
+    :param deployment_name: name of local deployment
+    :type deployment_name: str
+    :return: container name
+    :rtype: str
+    """
+    return f"{endpoint_name}.{deployment_name}" if deployment_name else endpoint_name
+
+
def _validate_container_state(
    endpoint_name: str,
    deployment_name: str,
    container: "docker.models.containers.Container",  # type: ignore[name-defined]
):
    """Raises if the deployment container has exited.

    :param endpoint_name: name of local endpoint
    :type endpoint_name: str
    :param deployment_name: name of local deployment
    :type deployment_name: str
    :param container: container of local Deployment
    :type container: docker.models.containers.Container
    :raises: azure.ai.ml._local_endpoints.errors.LocalEndpointInFailedStateError
    """
    if get_status_from_container(container=container) == LocalEndpointConstants.CONTAINER_EXITED:
        raise LocalEndpointInFailedStateError(endpoint_name=endpoint_name, deployment_name=deployment_name)
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/dockerfile_instructions.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/dockerfile_instructions.py
new file mode 100644
index 00000000..487d8dee
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/dockerfile_instructions.py
@@ -0,0 +1,86 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+#
+# This file contains Dockerfile instructions as Python classes.
+# Using them as str(Cmd) for example will output the proper Dockerfile instruction as a string.
+
+from typing import Optional
+
+
class Cmd(object):
    """Python object representation of Docker CMD instruction."""

    def __init__(self, command_array):
        self.command_array = command_array

    def __str__(self) -> str:
        # Exec-form CMD: each token is double-quoted and comma-separated.
        quoted = ", ".join(f'"{part}"' for part in self.command_array)
        return f"CMD [{quoted}]"
+
+
class Copy(object):
    """Python object representation of Docker COPY instruction."""

    def __init__(self, src, dest):
        self.src = src
        self.dest = dest

    def __str__(self) -> str:
        # Sources are space-separated; the destination is always the last token.
        sources = " ".join(self.src)
        return " ".join(("COPY", sources, self.dest))
+
+
class Env(object):
    """Python object representation of Docker ENV instruction."""

    def __init__(self, key, value):
        self.key = key
        self.value = value

    def __str__(self) -> str:
        return "ENV {}={}".format(self.key, self.value)
+
+
class Expose(object):
    """Python object representation of Docker EXPOSE instruction."""

    def __init__(self, port):
        self.port = port

    def __str__(self) -> str:
        return "EXPOSE {}".format(self.port)
+
+
class From(object):
    """Python object representation of Docker FROM instruction."""

    def __init__(self, base_image_name: str, stage_name: Optional[str] = None):
        self.base_image = base_image_name
        self.stage_name = stage_name

    def __str__(self) -> str:
        # Multi-stage builds append "as <stage>"; single-stage has no suffix.
        suffix = "" if self.stage_name is None else f" as {self.stage_name}"
        return f"FROM {self.base_image}{suffix}"
+
+
class Run(object):
    """Python object representation of Docker RUN instruction."""

    def __init__(self, command: str):
        self.command = command

    def __str__(self) -> str:
        return " ".join(("RUN", self.command))
+
+
class Workdir(object):
    """Python object representation of Docker WORKDIR instruction."""

    def __init__(self, directory: str):
        self.directory = directory

    def __str__(self) -> str:
        return "WORKDIR {}".format(self.directory)
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/dockerfile_resolver.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/dockerfile_resolver.py
new file mode 100644
index 00000000..3f126e2d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/dockerfile_resolver.py
@@ -0,0 +1,156 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+
+import logging
+from pathlib import Path
+from typing import List, Optional
+
+from azure.ai.ml.constants._common import DefaultOpenEncoding
+from azure.ai.ml.constants._endpoint import LocalEndpointConstants
+
+from .dockerfile_instructions import Cmd, Copy, From, Run, Workdir
+
+module_logger = logging.getLogger(__name__)
+
+
class DockerfileResolver(object):
    """Represents the contents of a Dockerfile and handles writing the Dockerfile to User's system.

    :param docker_base_image: base image to derive the local deployment image from
    :type docker_base_image: str
    :param docker_conda_file_name: name of conda file to copy into docker image
    :type docker_conda_file_name: str
    :param docker_port: port to expose in docker image
    :type docker_port: str
    :param docker_azureml_app_path: path in docker image to user's azureml app
    :type docker_azureml_app_path: (str, optional)
    """

    def __init__(
        self,
        docker_base_image: str,
        dockerfile: str,
        docker_conda_file_name: Optional[str] = None,
        docker_port: Optional[str] = None,
        docker_azureml_app_path: Optional[str] = None,
        install_debugpy: bool = False,
    ):
        """Constructor of a Dockerfile object.

        :param docker_base_image: base image; when falsy, the raw ``dockerfile``
            contents are used as the starting point instead
        :type docker_base_image: str
        :param dockerfile: contents of a user-provided Dockerfile (used only
            when no base image is given)
        :type dockerfile: str
        :param docker_conda_file_name: name of conda file to copy and build an
            environment from inside the image
        :type docker_conda_file_name: str
        :param docker_port: port to expose in docker image
        :type docker_port: str
        :param docker_azureml_app_path: path in the image for the user's azureml app
        :type docker_azureml_app_path: (str, optional)
        :return DockerfileResolver:
        """
        # Instruction objects (or raw strings) joined by __str__ in order.
        self._instructions: List[object] = []
        # Set lazily by write_file(); None until the Dockerfile has been written.
        self._local_dockerfile_path: Optional[str] = None
        self._dockerfile = dockerfile
        self._docker_base_image = docker_base_image
        self._docker_conda_file_name = docker_conda_file_name
        self._docker_azureml_app_path = docker_azureml_app_path
        self._docker_port = docker_port
        self._construct(install_debugpy=install_debugpy)

    @property
    def local_path(self) -> Optional[str]:
        """Returns the local dockerfile path.

        :return: str
        """
        return self._local_dockerfile_path

    def __str__(self) -> str:
        """Override DockerfileResolver str() built-in func to return the Dockerfile contents as a string.

        :return: Dockerfile Contents
        :rtype: str
        """
        return "" if len(self._instructions) == 0 else "\n".join([str(instr) for instr in self._instructions])

    def _construct(self, install_debugpy: bool = False) -> None:
        """Internal use only.

        Constructs the Dockerfile instructions based on properties.

        :param install_debugpy: Whether to install debugpy. Defaults to False.
        :type install_debugpy: bool
        """
        self._instructions = []
        if self._docker_base_image:
            self._instructions = [From(self._docker_base_image)]
        else:
            # NOTE(review): a raw Dockerfile string is placed in the instruction
            # list as-is; __str__ relies on str() passing it through unchanged.
            self._instructions = [self._dockerfile]
        # When a port is configured, prepare the app directory and make it the
        # working directory. No EXPOSE instruction is emitted here — presumably
        # the port mapping is handled at `docker run` time; TODO confirm.
        if self._docker_port:
            self._instructions.extend(
                [
                    Run(f"mkdir -p {self._docker_azureml_app_path}"),
                    Workdir(str(self._docker_azureml_app_path)),
                ]
            )

        if self._docker_conda_file_name and self._docker_azureml_app_path:
            # Conda flow: copy the conda file into the app path, build a named
            # environment from it, and launch runit under that environment.
            self._instructions.extend(
                [
                    Copy(
                        [
                            f"{self._docker_conda_file_name}",
                        ],
                        self._docker_azureml_app_path,
                    ),
                    Run(
                        (
                            f"conda env create -n {LocalEndpointConstants.CONDA_ENV_NAME} "
                            f"--file {self._docker_conda_file_name}"
                        )
                    ),
                ]
            )
            if install_debugpy:
                # debugpy must live in the same conda env the server runs in.
                self._instructions.extend(
                    [Run(f"conda run -n {LocalEndpointConstants.CONDA_ENV_NAME} pip install debugpy")]
                )
            self._instructions.extend(
                [
                    # --no-capture-output keeps the server's stdout/stderr
                    # streaming to the container logs.
                    Cmd(
                        [
                            "conda",
                            "run",
                            "--no-capture-output",
                            "-n",
                            LocalEndpointConstants.CONDA_ENV_NAME,
                            "runsvdir",
                            "/var/runit",
                        ]
                    ),
                ]
            )
        else:
            # No conda file: assume the (base) image already has the runtime;
            # install debugpy into it directly if requested.
            if install_debugpy:
                self._instructions.extend([Run("pip install debugpy")])
            self._instructions.extend(
                [
                    Cmd(["runsvdir", "/var/runit"]),
                ]
            )

    def write_file(self, directory_path: str, file_prefix: Optional[str] = None) -> None:
        """Writes this Dockerfile to a file in provided directory and file name prefix.

        :param directory_path: absolute path of local directory to write Dockerfile.
        :type directory_path: str
        :param file_prefix: name of Dockerfile prefix
        :type file_prefix: str
        """
        file_name = f"{file_prefix}.Dockerfile" if file_prefix else "Dockerfile"
        self._local_dockerfile_path = str(Path(directory_path, file_name).resolve())
        with open(self._local_dockerfile_path, "w", encoding=DefaultOpenEncoding.WRITE) as f:
            # Trailing newline so the generated Dockerfile ends cleanly.
            f.write(f"{str(self)}\n")
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/endpoint_stub.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/endpoint_stub.py
new file mode 100644
index 00000000..8c507134
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/endpoint_stub.py
@@ -0,0 +1,127 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+
+import json
+import shutil
+from pathlib import Path
+from typing import Iterable, Optional
+
+from azure.ai.ml.constants._common import DefaultOpenEncoding
+from azure.ai.ml.entities import OnlineEndpoint
+from azure.ai.ml.entities._load_functions import load_online_endpoint
+
+
class EndpointStub:
    """Represents a local endpoint that has no deployments created under it yet.

    A small cache file with the endpoint specification is written to an
    idempotent, well-known location under the user's home directory, so local
    endpoints can be listed, fetched and deleted before any deployment exists.
    """

    def create_or_update(self, endpoint: OnlineEndpoint) -> OnlineEndpoint:
        """Create or update a local endpoint.

        :param OnlineEndpoint endpoint: OnlineEndpoint entity to create or update.
        :return: The provided endpoint
        :rtype: OnlineEndpoint
        """
        self._create_endpoint_cache(endpoint=endpoint)
        return endpoint

    def get(self, endpoint_name: str) -> Optional[OnlineEndpoint]:
        """Get a local endpoint.

        :param str endpoint_name: Name of local endpoint to get.
        :return: The cached endpoint, or None when no cache file exists.
        :rtype: Optional[OnlineEndpoint]
        """
        cache_file = self._get_endpoint_cache_file(endpoint_name=endpoint_name)
        if not cache_file.exists():
            return None
        return load_online_endpoint(source=cache_file)

    def list(self) -> Iterable[Path]:
        """List all local endpoints.

        :return: Paths of all cached endpoint specification files.
        :rtype: Iterable[Path]
        """
        cache_root = self._get_inferencing_cache_dir()
        return [spec_file for spec_file in cache_root.glob("*/*.json")]

    def delete(self, endpoint_name: str):
        """Delete a local endpoint.

        :param str endpoint_name: Name of local endpoint to delete.
        """
        # Removing the build directory drops the cache file and any artifacts.
        shutil.rmtree(self._get_build_directory(endpoint_name=endpoint_name))

    def invoke(self):
        """Invoke a local endpoint.

        An EndpointStub has no deployments, so it cannot be invoked; a helper
        message is returned instead.

        :return: Invocation result
        :rtype: str
        """
        return (
            "This local endpoint does not have any deployments, so it cannot be invoked."
            "Please use 'az ml online-deployment create --local' before invoking."
        )

    def _create_endpoint_cache(self, endpoint: OnlineEndpoint) -> Path:
        """Write the endpoint specification to its cache file.

        :param OnlineEndpoint endpoint: OnlineEndpoint entity to cache.
        :return: The endpoint cache path
        :rtype: Path
        """
        cache_file = self._get_endpoint_cache_file(endpoint_name=str(endpoint.name))
        cache_file.write_text(json.dumps(endpoint.dump()), encoding=DefaultOpenEncoding.WRITE)
        return cache_file

    def _get_endpoint_cache_file(self, endpoint_name: str) -> Path:
        """Return the endpoint's cache file path, creating its directory. Idempotent.

        :param str endpoint_name: Name of local endpoint to get local cache.
        :return: path to cached endpoint file.
        :rtype: Path
        """
        return self._create_build_directory(endpoint_name=endpoint_name) / f"{endpoint_name}.json"

    def _create_build_directory(self, endpoint_name: str) -> Path:
        """Create (if needed) and return the endpoint's build directory.

        :param str endpoint_name: Name of local endpoint to get local directory.
        :return: path to endpoint build directory.
        :rtype: Path
        """
        build_dir = self._get_build_directory(endpoint_name=endpoint_name)
        build_dir.mkdir(parents=True, exist_ok=True)
        return build_dir

    def _get_build_directory(self, endpoint_name: str) -> Path:
        """Return the endpoint's build directory path. Idempotent.

        :param str endpoint_name: Name of local endpoint to get local directory.
        :return: path to endpoint build directory.
        :rtype: Path
        """
        return self._get_inferencing_cache_dir() / endpoint_name

    @classmethod
    def _get_inferencing_cache_dir(cls) -> Path:
        """Return the local inferencing cache directory. Idempotent.

        :return: path to local inferencing cache directory.
        :rtype: Path
        """
        return Path.home() / ".azureml" / "inferencing"
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/local_endpoint_mode.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/local_endpoint_mode.py
new file mode 100644
index 00000000..bab6bec8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/local_endpoint_mode.py
@@ -0,0 +1,12 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+import enum
+
+
class LocalEndpointMode(enum.Enum):
    """Determines the mode of how the LocalEndpoint container will be created."""

    DetachedContainer = 0
    VSCodeDevContainer = 1
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/mdc_config_resolver.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/mdc_config_resolver.py
new file mode 100644
index 00000000..1cf17b74
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/mdc_config_resolver.py
@@ -0,0 +1,87 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+import json
+import os.path
+from pathlib import Path
+from typing import Any, Dict
+
+from azure.ai.ml.constants._common import DefaultOpenEncoding
+from azure.ai.ml.entities._deployment.data_collector import DataCollector
+
+
class MdcConfigResolver(object):
    """Represents the contents of mdc config and handles writing the mdc configuration to User's system.

    :param data_collector: model data collector entity
    :type data_collector: DataCollector
    """

    def __init__(
        self,
        data_collector: DataCollector,
    ):
        # Environment variables / docker volume mounts to apply to the local
        # container; both stay empty until write_file() succeeds.
        self.environment_variables: Dict = {}
        self.volumes: Dict = {}
        # Parsed MDC settings dict, or None when nothing should be collected.
        self.mdc_config: Any = None
        # Path of the config inside the container vs. file name written locally.
        self.config_path = "/etc/mdc-config.json"
        self.local_config_name = "mdc-config.json"
        self._construct(data_collector)

    def _construct(self, data_collector: DataCollector) -> None:
        """Constructs the mdc configuration based on entity.

        :param data_collector: The data collector
        :type data_collector: DataCollector

        .. note::

            Internal use only.
        """
        if not data_collector.collections:
            return

        if len(data_collector.collections) <= 0:
            return

        # Deployment-level sampling rate (fraction) becomes the default
        # per-collection sampling percentage; falls back to 100%.
        sampling_percentage = int(data_collector.sampling_rate * 100) if data_collector.sampling_rate else 100

        self.mdc_config = {"collections": {}, "runMode": "local"}
        custom_logging_enabled = False
        for k, v in data_collector.collections.items():
            # NOTE(review): `enabled` is compared as the string "true", so it
            # is presumably a string field rather than a bool — confirm.
            if v.enabled and v.enabled.lower() == "true":
                lower_k = k.lower()

                # Any collection other than the built-in request/response pair
                # counts as custom logging.
                if lower_k not in ("request", "response"):
                    custom_logging_enabled = True

                self.mdc_config["collections"][lower_k] = {
                    "enabled": True,
                    # Per-collection rate overrides the deployment-level default.
                    "sampling_percentage": int(v.sampling_rate * 100) if v.sampling_rate else sampling_percentage,
                }

        # NOTE(review): the whole config is discarded unless at least one
        # custom collection is enabled — request/response-only collection is
        # intentionally(?) not supported locally; confirm.
        if not custom_logging_enabled:
            self.mdc_config = None
            return

        if data_collector.request_logging and data_collector.request_logging.capture_headers:
            self.mdc_config["captureHeaders"] = data_collector.request_logging.capture_headers

    def write_file(self, directory_path: str) -> None:
        """Writes this mdc configuration to a file in provided directory.

        :param directory_path: absolute path of local directory to write Dockerfile.
        :type directory_path: str
        """
        # No-op when _construct decided no collection is needed.
        if not self.mdc_config:
            return

        mdc_setting_path = str(Path(directory_path, self.local_config_name).resolve())
        with open(mdc_setting_path, "w", encoding=DefaultOpenEncoding.WRITE) as f:
            d = json.dumps(self.mdc_config)
            f.write(f"{d}")

        # Tell the server where to find the config inside the container.
        self.environment_variables = {"AZUREML_MDC_CONFIG_PATH": self.config_path}
        local_path = os.path.join(directory_path, self.local_config_name)

        # NOTE(review): the volume key uses the "src:dst:z" docker syntax while
        # the value repeats the bind mapping — verify the consumer's expected shape.
        self.volumes = {f"{local_path}:{self.config_path}:z": {local_path: {"bind": self.config_path}}}
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/utilities/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/utilities/__init__.py
new file mode 100644
index 00000000..d540fd20
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/utilities/__init__.py
@@ -0,0 +1,3 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/utilities/commandline_utility.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/utilities/commandline_utility.py
new file mode 100644
index 00000000..3f41e5f0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/utilities/commandline_utility.py
@@ -0,0 +1,111 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+import json
+import os
+import subprocess
+import sys
+import time
+
+from azure.ai.ml.exceptions import ErrorCategory, ErrorTarget, MlException
+
+
+def _print_command_results(test_passed, time_taken, output):
+    print("Command {} in {} seconds.".format("successful" if test_passed else "failed", time_taken))
+    print("Output: \n{}\n".format(output))
+
+
def run_cli_command(
    cmd_arguments,
    custom_environment=None,
    return_json=False,
    timeout=None,
    do_not_print=True,
    stderr_to_stdout=True,
):
    """Run a CLI command through the shell and return its captured output.

    :param cmd_arguments: command and its arguments; joined into one string
        because shell=True does not work with the argv-vector form on macOS.
    :type cmd_arguments: list
    :param custom_environment: environment variables for the child process;
        defaults to the current process environment.
    :type custom_environment: dict
    :param return_json: parse the output (warnings stripped) as JSON and return the object.
    :type return_json: bool
    :param timeout: seconds to wait before subprocess.TimeoutExpired is raised.
    :type timeout: float
    :param do_not_print: suppress echoing the command and its output
        (e.g. avoids printing an az login service principal password).
    :type do_not_print: bool
    :param stderr_to_stdout: merge stderr into the captured stdout so error
        replies (especially from negative tests) can be inspected.
    :type stderr_to_stdout: bool
    :return: command output string, or the parsed JSON object when return_json is True
    :raises subprocess.CalledProcessError: when the command exits non-zero
    :raises MlException: when return_json is True but the output is not valid JSON
    """
    if not custom_environment:
        custom_environment = os.environ

    # "shell=True" (needed for the "az" wrapper) requires a single command
    # string rather than an argv vector on macOS.
    # NOTE: callers must pass trusted arguments only — the string runs in a shell.
    command_to_execute = " ".join(cmd_arguments)

    if not do_not_print:  # Avoid printing the az login service principal password, for example
        print(f"Preparing to run CLI command: \n{command_to_execute}\n")
        print(f"Current directory: {os.getcwd()}")

    start_time = time.time()
    try:
        # Build the subprocess arguments once; the previous implementation
        # duplicated this dict and gated "timeout" behind a dead Python-2
        # version check (this package only supports Python 3).
        subprocess_args = {
            "shell": True,
            "env": custom_environment,
            "timeout": timeout,
        }
        if stderr_to_stdout:
            subprocess_args["stderr"] = subprocess.STDOUT

        output = subprocess.check_output(command_to_execute, **subprocess_args).decode(encoding="UTF-8")

        time_taken = time.time() - start_time
        if not do_not_print:
            _print_command_results(True, time_taken, output)

        if return_json:
            try:
                return json.loads(exclude_warnings(output))
            except Exception as e:
                msg = "Expected JSON, instead got: \n{}\n"
                raise MlException(
                    message=msg.format(output),
                    no_personal_data_message=msg.format("[something else]"),
                    target=ErrorTarget.LOCAL_ENDPOINT,
                    error_category=ErrorCategory.SYSTEM_ERROR,
                ) from e
        return output
    except subprocess.CalledProcessError as e:
        time_taken = time.time() - start_time
        output = e.output.decode(encoding="UTF-8")
        if not do_not_print:
            _print_command_results(False, time_taken, output)

        # Re-raise with the original traceback intact.
        raise
+
+
def exclude_warnings(cmd_output):
    """Strip warning lines surrounding a JSON object in CLI output.

    Keeps the lines from the first line starting with "{" through the last
    line starting with "}" (inclusive); everything outside that span is
    treated as warning/noise text.

    :param cmd_output: raw CLI output, possibly with warnings around the JSON
    :type cmd_output: str
    :return: the JSON portion of the output, or "" when no JSON object is found
    :rtype: str
    """
    lines = cmd_output.splitlines()
    start_index = next((i for i, line in enumerate(lines) if line.startswith("{")), None)
    end_index = None
    for i, line in enumerate(lines):
        if line.startswith("}"):
            end_index = i

    # Bug fix: the previous implementation compared None indices with <=,
    # raising TypeError when the output contained no JSON at all. Return ""
    # instead so the caller surfaces a clear JSON parse error.
    if start_index is None or end_index is None:
        return ""

    return "\n".join(lines[start_index : end_index + 1])
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/utilities/wsl_utility.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/utilities/wsl_utility.py
new file mode 100644
index 00000000..33bb6036
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/utilities/wsl_utility.py
@@ -0,0 +1,35 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+
+from platform import uname
+
+from azure.ai.ml._local_endpoints.utilities.commandline_utility import run_cli_command
+
+
def in_wsl() -> bool:
    """WSL is thought to be the only common Linux kernel with Microsoft in the
    name, per Microsoft:

    https://github.com/microsoft/WSL/issues/4071#issuecomment-496715404

    :return: True if running in WSL
    :rtype: bool
    """
    kernel_release = uname().release
    return "microsoft" in kernel_release.lower()
+
+
def get_wsl_path(path: str) -> str:
    """Converts a WSL unix path to a Windows Path

    Input /home/username/ for example.
    Output /mnt/c/users/username

    :param path: The UNIX path
    :type path: str
    :return: A Windows Path
    :rtype: str
    """
    # Delegate to the wslpath utility shipped with WSL; -w asks for Windows form.
    return run_cli_command(["wslpath", "-w", path])
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/validators/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/validators/__init__.py
new file mode 100644
index 00000000..dc6af819
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/validators/__init__.py
@@ -0,0 +1,11 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
+
+from .code_validator import get_code_configuration_artifacts
+from .environment_validator import get_environment_artifacts
+from .model_validator import get_model_artifacts
+
+__all__ = ["get_code_configuration_artifacts", "get_environment_artifacts", "get_model_artifacts"]
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/validators/code_validator.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/validators/code_validator.py
new file mode 100644
index 00000000..bbe7c971
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/validators/code_validator.py
@@ -0,0 +1,114 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=protected-access
+
+from pathlib import Path
+from typing import Optional, Union
+
+from azure.ai.ml._artifacts._artifact_utilities import download_artifact_from_storage_url
+from azure.ai.ml._utils._arm_id_utils import parse_prefixed_name_version
+from azure.ai.ml._utils.utils import is_url
+from azure.ai.ml.constants._common import ARM_ID_PREFIX
+from azure.ai.ml.entities import OnlineDeployment
+from azure.ai.ml.entities._deployment.code_configuration import CodeConfiguration
+from azure.ai.ml.exceptions import RequiredLocalArtifactsNotFoundError
+from azure.ai.ml.operations._code_operations import CodeOperations
+
+
def get_code_configuration_artifacts(
    endpoint_name: str,
    deployment: OnlineDeployment,
    code_operations: CodeOperations,
    download_path: str,
) -> Optional[Union[str, Path]]:
    """Validates and returns code artifacts from deployment specification.

    :param endpoint_name: name of endpoint which this deployment is linked to
    :type endpoint_name: str
    :param deployment: deployment to validate
    :type deployment: OnlineDeployment
    :param code_operations: The code operations
    :type code_operations: CodeOperations
    :param download_path: The path to download to
    :type download_path: str
    :return: local path to code
    :rtype: str
    :raises: azure.ai.ml._local_endpoints.errors.RequiredLocalArtifactsNotFoundError
    :raises: azure.ai.ml._local_endpoints.errors.CloudArtifactsNotSupportedError
    """
    # Validate code for local endpoint
    # Deployments without a code configuration have no scoring code to validate.
    if not deployment.code_configuration:
        return None

    # NOTE(review): str(str) renders "<class 'str'>" in the error message —
    # looks unintended (here and in the two raises below); confirm.
    if not isinstance(deployment.code_configuration, CodeConfiguration):
        raise RequiredLocalArtifactsNotFoundError(
            endpoint_name=endpoint_name,
            required_artifact="code_configuration",
            required_artifact_type=str(str),
            deployment_name=deployment.name,
        )

    # Cloud references (URL or ARM id) are downloaded locally instead of
    # being validated as local paths.
    if _code_configuration_contains_cloud_artifacts(deployment=deployment):
        return _get_cloud_code_configuration_artifacts(
            str(deployment.code_configuration.code), code_operations, download_path
        )

    # Local references must resolve to an existing path with a scoring script.
    if not _local_code_path_is_valid(deployment=deployment):
        raise RequiredLocalArtifactsNotFoundError(
            endpoint_name=endpoint_name,
            required_artifact="code_configuration.code",
            required_artifact_type=str(str),
            deployment_name=deployment.name,
        )
    if not _local_scoring_script_is_valid(deployment=deployment):
        raise RequiredLocalArtifactsNotFoundError(
            endpoint_name=endpoint_name,
            required_artifact="code_configuration.scoring_script",
            required_artifact_type=str(str),
            deployment_name=deployment.name,
        )
    return _get_local_code_configuration_artifacts(deployment)
+
+
def _local_code_path_is_valid(deployment: OnlineDeployment):
    """Whether the deployment's code reference is a string path that exists locally."""
    code_config = deployment.code_configuration
    return (
        code_config
        and code_config.code
        and isinstance(code_config.code, str)
        and _get_local_code_configuration_artifacts(deployment).exists()
    )
+
+
def _local_scoring_script_is_valid(deployment: OnlineDeployment):
    """Whether the deployment's code configuration specifies a scoring script."""
    code_config = deployment.code_configuration
    return code_config and code_config.scoring_script
+
+
def _code_configuration_contains_cloud_artifacts(deployment: OnlineDeployment):
    """Whether the code reference points at a cloud artifact (URL or ARM id)."""
    # A str code value is a cloud reference when it is a URL or carries the ARM id prefix.
    code = deployment.code_configuration.code  # type: ignore[union-attr]
    if not isinstance(code, str):
        return False
    return is_url(code) or code.startswith(ARM_ID_PREFIX)
+
+
def _get_local_code_configuration_artifacts(
    deployment: OnlineDeployment,
) -> Path:
    """Resolve the deployment's code directory relative to its base path."""
    code_path = deployment.code_configuration.code  # type: ignore[union-attr]
    return Path(deployment._base_path, code_path).resolve()  # type: ignore[arg-type]
+
+
def _get_cloud_code_configuration_artifacts(code: str, code_operations: CodeOperations, download_path: str) -> str:
    """Download a cloud code asset and return its local path."""
    asset_name, asset_version = parse_prefixed_name_version(code)
    code_asset = code_operations.get(name=asset_name, version=asset_version)

    return download_artifact_from_storage_url(
        blob_url=code_asset.path,
        destination=download_path,
        datastore_operation=code_operations._datastore_operation,
        datastore_name=None,  # Use default datastore of current workspace
    )
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/validators/environment_validator.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/validators/environment_validator.py
new file mode 100644
index 00000000..cc739149
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/validators/environment_validator.py
@@ -0,0 +1,202 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=protected-access
+
+import os
+from pathlib import Path
+from typing import Optional, Tuple, Union
+
+from azure.ai.ml._artifacts._artifact_utilities import download_artifact_from_storage_url
+from azure.ai.ml._utils._arm_id_utils import parse_name_label, parse_name_version
+from azure.ai.ml._utils.utils import dump_yaml, is_url
+from azure.ai.ml.constants._common import DefaultOpenEncoding
+from azure.ai.ml.entities import OnlineDeployment
+from azure.ai.ml.entities._assets.environment import BuildContext, Environment
+from azure.ai.ml.exceptions import ErrorCategory, ErrorTarget, RequiredLocalArtifactsNotFoundError, ValidationException
+from azure.ai.ml.operations._environment_operations import EnvironmentOperations
+
+
+def get_environment_artifacts(
+    endpoint_name: str,
+    deployment: OnlineDeployment,
+    environment_operations: EnvironmentOperations,
+    download_path: str,
+) -> Optional[Tuple]:
+    """Validates and returns artifacts from environment specification.
+
+    :param endpoint_name: name of endpoint which this deployment is linked to
+    :type endpoint_name: str
+    :param deployment: deployment to validate
+    :type deployment: OnlineDeployment
+    :param environment_operations: The environment operations
+    :type environment_operations: EnvironmentOperations
+    :param download_path: The path to download to
+    :type download_path: str
+    :return: (base_image, conda_file_path, conda_file_contents, build_directory,
+        dockerfile_contents, inference_config)
+
+        Either base_image or build_directory should be None.
+    :rtype: Union[
+            Tuple[str, Optional[Path], str, None, None, Optional[Dict]],
+            Tuple[None, None, None, Path, str, Optional[Dict]]
+        ]
+    :raises: azure.ai.ml._local_endpoints.errors.RequiredLocalArtifactsNotFoundError
+    :raises: azure.ai.ml._local_endpoints.errors.CloudArtifactsNotSupportedError
+    """
+    # Validate environment for local endpoint
+    if _environment_contains_cloud_artifacts(deployment=deployment):
+        if isinstance(deployment.environment, Environment):
+            environment_asset = deployment.environment
+        else:
+            # String reference: parse as "name:version" first; when no version
+            # was supplied, re-parse as "name@label" and resolve via the service.
+            name, version = parse_name_version(deployment.environment)
+            label = None
+            if not version:
+                name, label = parse_name_label(deployment.environment)
+            environment_asset = environment_operations.get(name=name, version=version, label=label)
+
+        # Cloud environments without an image or a build path (e.g. curated
+        # environments) cannot be materialized locally.
+        if not _cloud_environment_is_valid(environment=environment_asset):
+            msg = (
+                "Cloud environment must have environment.image "
+                "or the environment.build.path set to work for local endpoints."
+                " Note: Curated environments are not supported for local deployments."
+            )
+            raise ValidationException(
+                message=msg,
+                no_personal_data_message=msg,
+                target=ErrorTarget.LOCAL_ENDPOINT,
+                error_category=ErrorCategory.USER_ERROR,
+            )
+        return _get_cloud_environment_artifacts(
+            environment_operations=environment_operations,
+            environment_asset=environment_asset,
+            download_path=download_path,
+        )
+    # Local path: the deployment must embed an Environment object carrying an
+    # image name or a local build context.
+    if not _local_environment_is_valid(deployment=deployment):
+        raise RequiredLocalArtifactsNotFoundError(
+            endpoint_name=endpoint_name,
+            required_artifact="environment.image or environment.build.path",
+            required_artifact_type=str(Environment),
+            deployment_name=deployment.name,
+        )
+    return _get_local_environment_artifacts(deployment.base_path, deployment.environment)  # type: ignore[arg-type]
+
+
+def _get_cloud_environment_artifacts(
+    environment_operations: EnvironmentOperations,
+    environment_asset: Environment,
+    download_path: str,
+) -> Tuple:
+    """Retrieves the cloud environment's artifacts
+
+    :param environment_operations: The environment operations
+    :type environment_operations: EnvironmentOperations
+    :param environment_asset: The cloud environment
+    :type environment_asset: Environment
+    :param download_path: The path to download to
+    :type download_path: str
+    :return: (base_image, conda_file_path, conda_file_contents, build_directory,
+        dockerfile_contents, inference_config)
+
+        Either base_image or build_directory should be None.
+    :rtype: Union[
+            Tuple[str, Optional[Path], str, None, None, Optional[Dict]],
+            Tuple[None, None, None, Path, str, Optional[Dict]]
+        ]
+    """
+    # A build context stored as a URL must be downloaded first so the
+    # Dockerfile inside it can be built locally.
+    if environment_asset.build and environment_asset.build.path and is_url(environment_asset.build.path):
+        environment_build_directory = download_artifact_from_storage_url(
+            blob_url=str(environment_asset.build.path),
+            destination=download_path,
+            datastore_operation=environment_operations._datastore_operation,
+            datastore_name="workspaceartifactstore",
+        )
+        dockerfile_path = Path(environment_build_directory, str(environment_asset.build.dockerfile_path))
+        dockerfile_contents = dockerfile_path.read_text(encoding=DefaultOpenEncoding.READ)
+        return (
+            None,
+            None,
+            None,
+            environment_build_directory,
+            dockerfile_contents,
+            environment_asset.inference_config,
+        )
+    conda_file_contents = dump_yaml(environment_asset.conda_file) if environment_asset.conda_file else None
+    # NOTE(review): the second element below is the environment asset *id*,
+    # while the docstring calls that slot conda_file_path — confirm what the
+    # consumer expects.
+    return (
+        environment_asset.image,
+        environment_asset.id,
+        conda_file_contents,
+        None,
+        None,
+        environment_asset.inference_config,
+    )
+
+
def _get_local_environment_artifacts(base_path: Union[str, os.PathLike], environment: Environment) -> Optional[Tuple]:
    """Retrieves the local environment's artifacts

    :param base_path: The base path
    :type base_path: Union[str, os.PathLike]
    :param environment: The local environment
    :type environment: Environment
    :return: (base_image, conda_file_path, conda_file_contents, build_directory,
        dockerfile_contents, inference_config)

        Either base_image or build_directory should be None.
    :rtype: Union[
            Tuple[str, Optional[Path], str, None, None, Optional[Dict]],
            Tuple[None, None, None, Path, str, Optional[Dict]]
        ]
    """
    # Image-based environment: surface the image plus its conda specification.
    if environment.image:
        return (
            environment.image,
            environment._conda_file_path,
            dump_yaml(environment.conda_file),
            None,
            None,
            environment.inference_config,
        )
    # Dockerfile-based environment: resolve both paths against the base path.
    build = environment.build
    if build and build.dockerfile_path:
        build_directory = Path(base_path, str(build.path)).resolve()
        dockerfile = Path(build_directory, build.dockerfile_path).resolve()
        return (
            None,
            None,
            None,
            build_directory,
            dockerfile.read_text(encoding=DefaultOpenEncoding.READ),
            environment.inference_config,
        )

    # Neither an image nor a usable build context was provided.
    return None
+
+
def _local_environment_is_valid(deployment: OnlineDeployment):
    """Truthy when the deployment embeds an Environment with an image or a local build context."""
    env = deployment.environment
    if not isinstance(env, Environment):
        return False
    has_usable_build = (
        env.build is not None
        and isinstance(env.build, BuildContext)
        and _local_build_context_is_valid(env.build)
    )
    return env.image or has_usable_build
+
+
def _local_build_context_is_valid(build_context: BuildContext):
    """A usable local build context must at least specify a path."""
    has_local_path = build_context.path is not None
    return has_local_path
+
+
def _cloud_environment_is_valid(environment: Environment):
    """Truthy when the cloud environment carries an image or a build path usable locally."""
    if not isinstance(environment, Environment):
        return False
    return environment.image or (environment.build and environment.build.path)
+
+
def _environment_contains_cloud_artifacts(deployment: OnlineDeployment):
    """A str environment is a cloud reference; an Environment with an id came from the service."""
    env = deployment.environment
    if isinstance(env, str):
        return True
    return env is not None and env.id is not None
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/validators/model_validator.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/validators/model_validator.py
new file mode 100644
index 00000000..40ed2df7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/validators/model_validator.py
@@ -0,0 +1,93 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=protected-access
+
+from os import PathLike
+from pathlib import Path
+from typing import Tuple, Union
+
+from azure.ai.ml._artifacts._artifact_utilities import download_artifact
+from azure.ai.ml._utils._arm_id_utils import parse_prefixed_name_version
+from azure.ai.ml._utils._storage_utils import AzureMLDatastorePathUri
+from azure.ai.ml.entities import OnlineDeployment
+from azure.ai.ml.entities._assets import Model
+from azure.ai.ml.exceptions import RequiredLocalArtifactsNotFoundError
+from azure.ai.ml.operations._model_operations import ModelOperations
+
+
+def get_model_artifacts(
+    endpoint_name: str,
+    deployment: OnlineDeployment,
+    model_operations: ModelOperations,
+    download_path: str,
+) -> Union[str, Tuple]:
+    """Validates and returns model artifacts from deployment specification.
+
+    :param endpoint_name: name of endpoint which this deployment is linked to
+    :type endpoint_name: str
+    :param deployment: deployment to validate
+    :type deployment: OnlineDeployment
+    :param model_operations: The model operations
+    :type model_operations: ModelOperations
+    :param download_path: The path to download to
+    :type download_path: str
+    :return: (model name, model version, the local directory of the model artifact)
+    :rtype: Tuple[str, str, Path]
+    :raises: azure.ai.ml._local_endpoints.errors.RequiredLocalArtifactsNotFoundError
+    :raises: azure.ai.ml._local_endpoints.errors.CloudArtifactsNotSupportedError
+    """
+    # Validate model for local endpoint
+    if _model_contains_cloud_artifacts(deployment=deployment):
+        # NOTE(review): the model is stringified here, yet _get_cloud_model_artifacts
+        # still isinstance-checks for Model — confirm which form is intended.
+        return _get_cloud_model_artifacts(
+            model_operations=model_operations,
+            model=str(deployment.model),
+            download_path=download_path,
+        )
+    if not _local_model_is_valid(deployment=deployment):
+        raise RequiredLocalArtifactsNotFoundError(
+            endpoint_name=endpoint_name,
+            required_artifact="model.path",
+            required_artifact_type=str,
+            deployment_name=deployment.name,
+        )
+    _model: Model = deployment.model  # type: ignore[assignment]
+    _model_path: Union[str, PathLike] = _model.path  # type: ignore[assignment]
+    # Local model: return the directory containing the resolved model path.
+    return (
+        _model.name,
+        _model.version,
+        Path(deployment._base_path, _model_path).resolve().parent,
+    )
+
+
def _local_model_is_valid(deployment: OnlineDeployment):
    """Truthy when the deployment embeds a Model object with a local path set."""
    model = deployment.model
    return model and isinstance(model, Model) and model.path
+
+
def _model_contains_cloud_artifacts(deployment: OnlineDeployment):
    """A str model is a cloud name/ARM id; a Model carrying an id came from the service."""
    model = deployment.model
    if isinstance(model, str):
        return True
    return model is not None and model.id is not None
+
+
+def _get_cloud_model_artifacts(model_operations: ModelOperations, model: str, download_path: str) -> Tuple:
+    """Resolve a cloud model reference and download its artifacts.
+
+    :param model: a prefixed name/version reference (a Model object is also
+        tolerated despite the str annotation — see the isinstance check below)
+    :return: (name, version, local path of the downloaded artifacts)
+    """
+    if isinstance(model, Model):
+        name = model.name
+        # NOTE(review): reads the protected `_version` attribute — confirm it
+        # matches the public `version` used elsewhere for this type.
+        version = model._version
+        model_asset = model
+    else:
+        name, version = parse_prefixed_name_version(model)
+        model_asset = model_operations.get(name=name, version=version)
+    model_uri_path = AzureMLDatastorePathUri(model_asset.path)
+    path = Path(model_uri_path.path)
+    # NOTE(review): is_dir() consults the *local* filesystem; for remote URIs
+    # this likely always falls through to path.parent — confirm intended.
+    starts_with = path if path.is_dir() else path.parent
+    return (
+        name,
+        version,
+        download_artifact(
+            starts_with=starts_with,
+            destination=download_path,
+            datastore_operation=model_operations._datastore_operation,
+            datastore_name=model_uri_path.datastore,
+        ),
+    )
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/vscode_debug/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/vscode_debug/__init__.py
new file mode 100644
index 00000000..d540fd20
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/vscode_debug/__init__.py
@@ -0,0 +1,3 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/vscode_debug/devcontainer_properties.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/vscode_debug/devcontainer_properties.py
new file mode 100644
index 00000000..fa60d379
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/vscode_debug/devcontainer_properties.py
@@ -0,0 +1,156 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+#
+# This file contains devcontainer.json properties as Python classes.
+# Reference: https://code.visualstudio.com/docs/remote/devcontainerjson-reference
+
+
+from typing import Dict, Optional
+
+from azure.ai.ml.constants._endpoint import LocalEndpointConstants
+
+
class Image(object):
    """Python object representation of devcontainer image property."""

    def __init__(self, image: str):
        self._image_name = image

    def to_dict(self) -> dict:
        """Return the ``image`` fragment for devcontainer.json."""
        return {"image": self._image_name}
+
+
class Build(object):
    """Python object representation of devcontainer build.dockerfile property."""

    def __init__(
        self,
        dockerfile_path: str,
        build_context: Optional[str] = None,
        args: Optional[dict] = None,
        target: Optional[str] = None,
    ):
        self._dockerfile = dockerfile_path
        self._context = build_context
        self._build_args = args
        self._stage = target

    def to_dict(self) -> dict:
        """Return the ``build`` fragment, omitting optional keys that are unset."""
        inner: Dict = {"dockerfile": self._dockerfile}
        for key, value in (
            ("context", self._context),
            ("args", self._build_args),
            ("target", self._stage),
        ):
            if value:
                inner[key] = value
        return {"build": inner}
+
+
class ContainerEnv(object):
    """Python object representation of devcontainer containerEnv property."""

    def __init__(self, environment_variables: dict):
        self._env_vars = environment_variables

    def to_dict(self) -> dict:
        """Return the ``containerEnv`` fragment for devcontainer.json."""
        return {"containerEnv": self._env_vars}
+
+
class Mounts(object):
    """Python object representation of devcontainer mounts property."""

    def __init__(self, mounts: list):
        self._mount_entries = mounts

    def to_dict(self) -> dict:
        """Return the ``mounts`` fragment for devcontainer.json."""
        return {"mounts": self._mount_entries}
+
+
class Name(object):
    """Python object representation of devcontainer name property."""

    def __init__(self, name: str):
        self._display_name = name

    def to_dict(self) -> dict:
        """Return the ``name`` fragment for devcontainer.json."""
        return {"name": self._display_name}
+
+
class ForwardPorts(object):
    """Python object representation of devcontainer forwardPorts property."""

    def __init__(self, port: int):
        self._forwarded_port = port

    def to_dict(self) -> dict:
        """Return the ``forwardPorts`` fragment (single-port list)."""
        return {"forwardPorts": [self._forwarded_port]}
+
+
class AppPort(object):
    """Python object representation of devcontainer appPort property."""

    def __init__(self, port: int):
        self._app_port = port

    def to_dict(self) -> dict:
        """Return the ``appPort`` fragment (single-port list)."""
        return {"appPort": [self._app_port]}
+
+
class RunArgs(object):
    """Python object representation of devcontainer runArgs property.

    :param name: container name, emitted as ``--name=<name>``
    :param labels: preformatted ``--label=key=value`` arguments
    """

    def __init__(self, name: Optional[str] = None, labels: Optional[list] = None):
        # Copy the caller's list: the previous implementation aliased it and
        # appended "--name=..." in place, mutating the caller's labels.
        self._run_args = list(labels) if labels else []
        if name:
            self._run_args.append(f"--name={name}")

    def to_dict(self) -> dict:
        """Return the ``runArgs`` fragment for devcontainer.json."""
        return {"runArgs": self._run_args}
+
+
class OverrideCommand(object):
    """Python object representation of the devcontainer overrideCommand flag (always true)."""

    def to_dict(self) -> dict:
        """Return the ``overrideCommand`` fragment for devcontainer.json."""
        return {"overrideCommand": True}
+
+
class Extensions(object):
    """Python object representation of the devcontainer extensions property."""

    # Python support plus the AzureML inference debugging extension.
    _EXTENSION_IDS = ("ms-python.python", "ms-toolsai.vscode-ai-inference")

    def to_dict(self) -> dict:
        """Return the ``extensions`` fragment (fresh list per call)."""
        return {"extensions": list(self._EXTENSION_IDS)}
+
+
class Settings(object):
    """Python object representation of the devcontainer settings property."""

    def to_dict(self) -> dict:
        """Return the ``settings`` fragment for devcontainer.json."""
        # Attach-style debug configuration consumed by the AzureML VS Code extension.
        debug_configuration = {
            "name": "Azure ML: Debug Local Endpoint",
            "type": "python",
            "request": "attach",
            "listen": {
                "host": "127.0.0.1",
                "port": 0,
            },
            "azuremlext": "local_inference_debug",
        }
        return {
            "settings": {
                "launch": {"configurations": [debug_configuration]},
                "python.defaultInterpreterPath": LocalEndpointConstants.CONDA_ENV_PYTHON_PATH,
            }
        }
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/vscode_debug/devcontainer_resolver.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/vscode_debug/devcontainer_resolver.py
new file mode 100644
index 00000000..0c6a77dc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/vscode_debug/devcontainer_resolver.py
@@ -0,0 +1,190 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+
+import json
+from pathlib import Path
+from typing import Dict, List, Optional
+
+from azure.ai.ml._local_endpoints.utilities.wsl_utility import get_wsl_path, in_wsl
+from azure.ai.ml._local_endpoints.vscode_debug.devcontainer_properties import (
+    AppPort,
+    Build,
+    ContainerEnv,
+    Extensions,
+    ForwardPorts,
+    Image,
+    Mounts,
+    OverrideCommand,
+    RunArgs,
+    Settings,
+)
+from azure.ai.ml.constants._common import DefaultOpenEncoding
+from azure.ai.ml.exceptions import ErrorCategory, ErrorTarget, ValidationException
+
+
+class DevContainerResolver:
+    """DevContainerResolver class represents the collection of properties of a devcontainer.json.
+
+    Reference: https://code.visualstudio.com/docs/remote/devcontainerjson-reference
+    """
+
+    def __init__(
+        self,
+        image: Optional[str] = None,
+        dockerfile_path: str = "../Dockerfile",
+        build_context: Optional[str] = None,
+        build_target: Optional[str] = None,
+        environment: Optional[dict] = None,
+        mounts: Optional[dict] = None,
+        labels: Optional[dict] = None,
+        port: int = 5001,
+    ):
+        """Resolves the devcontainer.json based on provided properties.
+
+        :param image: name of local deployment
+        :type image: str
+        :param dockerfile_path: path to Dockerfile relative to devcontainer.json
+        :type dockerfile_path: str
+        :param build_context: build directory on user's local system
+        :type build_context: str
+        :param build_target: directory on user's local system where Dockerfile is located
+        :type build_target: str
+        :param environment: dictionary of docker environment variables to set in dev container
+        :type environment: dict
+        :param mounts: dictionary of volumes to mount to dev container
+        :type mounts: dict
+        :param labels: dictionary of labels to add to dev container
+        :type labels: dict
+        :param port: Port exposed in Docker image for AzureML service.
+        :type port: int
+        """
+        # Either a prebuilt image or a build context is required to produce a
+        # usable devcontainer.json.
+        if not (image or (build_context and dockerfile_path)):
+            msg = "Must provide image or build context for devcontainer.json"
+            raise ValidationException(
+                message=msg,
+                no_personal_data_message=msg,
+                target=ErrorTarget.LOCAL_ENDPOINT,
+                error_category=ErrorCategory.USER_ERROR,
+            )
+        self._local_path: Optional[str] = None
+        self._properties: Optional[dict] = {}
+
+        self._image: Optional[str] = image
+        self._dockerfile_path: str = dockerfile_path
+        self._build_context: Optional[str] = build_context
+        self._build_target: Optional[str] = build_target
+        self._environment: Optional[dict] = environment
+        # Docker-format mounts/labels are converted to devcontainer syntax once, up front.
+        self._mounts: list = _reformat_mounts(mounts) if mounts else mounts  # type: ignore[assignment]
+        self._labels: list = _reformat_labels(labels) if labels else labels  # type: ignore[assignment]
+        self._port = port
+        self._construct()
+
+    @property
+    def local_path(self) -> Optional[str]:
+        """Returns the local path of the devcontainer.json.
+
+        :return: str
+        """
+        return self._local_path
+
+    def _construct(self) -> None:
+        """Constructs the devcontainer properties based on attributes."""
+        # An explicit image takes precedence; otherwise fall back to a Dockerfile build.
+        if self._image:
+            self._properties = Image(image=self._image).to_dict()
+        elif self._dockerfile_path and self._build_context:
+            self._properties = Build(
+                dockerfile_path=self._dockerfile_path,
+                build_context=self._build_context,
+                target=self._build_target,
+            ).to_dict()
+
+        if self._properties is not None:
+            self._properties.update(OverrideCommand().to_dict())
+            self._properties.update(Extensions().to_dict())
+            self._properties.update(Settings().to_dict())
+
+            # Optional fragments are only emitted when a value was provided.
+            if self._environment:
+                self._properties.update(ContainerEnv(environment_variables=self._environment).to_dict())
+            if self._mounts:
+                self._properties.update(Mounts(mounts=self._mounts).to_dict())
+            if self._labels:
+                self._properties.update(RunArgs(labels=self._labels).to_dict())
+            if self._port:
+                self._properties.update(AppPort(port=self._port).to_dict())
+                self._properties.update(ForwardPorts(port=self._port).to_dict())
+
+    def write_file(self, directory_path: str) -> None:
+        """Writes this devcontainer.json to provided directory.
+
+        :param directory_path: absolute path of local directory to write devcontainer.json.
+        :type directory_path: str
+        """
+        # Under WSL the recorded path is translated (presumably to the
+        # Windows-visible form — confirm against wsl_utility).
+        self._local_path = get_wsl_path(directory_path) if in_wsl() else directory_path
+
+        file_path = _get_devcontainer_file_path(directory_path=directory_path)
+        with open(file_path, "w", encoding=DefaultOpenEncoding.WRITE) as f:
+            f.write(f"{json.dumps(self._properties, indent=4)}\n")
+
+
+def _reformat_mounts(mounts: Dict[str, Dict[str, Dict[str, str]]]) -> List[str]:
+    """Reformat mounts from Docker format to DevContainer format.
+
+    :param mounts: Dictionary with mount information for Docker container. For example:
+        .. code-block:: python
+
+            {
+                "<unique mount key>": {
+                    "<local_source>": {
+                        "<mount type i.e. bind>": "<container_dest>"
+                    }
+                }
+            }
+
+    :type mounts: dict
+    :return:
+       ["source=${localWorkspaceFolder}/app-scripts, target=/usr/local/share/app-scripts,type=bind,consistency=cached"]
+    :rtype: List[str]
+    """
+    devcontainer_mounts = []
+    for mount_dict in mounts.values():
+        for source, dest in mount_dict.items():
+            for mount_type, container_dest in dest.items():
+                devcontainer_mounts.append(f"source={source},target={container_dest},type={mount_type}")
+    return devcontainer_mounts
+
+
+def _reformat_labels(labels: Dict[str, str]) -> List[str]:
+    """Reformat labels from Docker format to DevContainer format.
+
+    :param labels: Dictionary with label information for Docker container. For example:
+        .. code-block:: python
+
+            {
+                "key": "value",
+                "key1": "value1"
+            }
+
+    :type labels: Dict[str, str]
+    :return: ["--label=key=value", "--label=key1=value1"]
+    :rtype: List[str]
+    """
+    devcontainer_labels = []
+    for key, value in labels.items():
+        devcontainer_labels.append(f"--label={key}={value}")
+    return devcontainer_labels
+
+
+def _get_devcontainer_file_path(directory_path: str) -> str:
+    """Returns the path of the devcontainer in relation to provided directory path.
+
+    :param directory_path: absolute path of local directory to write devcontainer.json.
+    :type directory_path: str
+    :return: Absolute path to the devcontainer
+    :rtype: str
+    """
+    devcontainer_path = Path(directory_path, ".devcontainer")
+    devcontainer_path.mkdir(parents=True, exist_ok=True)
+    file_path = str(Path(devcontainer_path, "devcontainer.json").resolve())
+    return file_path
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/vscode_debug/vscode_client.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/vscode_debug/vscode_client.py
new file mode 100644
index 00000000..b2381bbf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_local_endpoints/vscode_debug/vscode_client.py
@@ -0,0 +1,51 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+import binascii
+import re
+
+from azure.ai.ml._local_endpoints.utilities.commandline_utility import run_cli_command
+from azure.ai.ml._local_endpoints.vscode_debug.devcontainer_resolver import DevContainerResolver
+from azure.ai.ml.exceptions import VSCodeCommandNotFound
+
+
+class VSCodeClient(object):
+    # pylint: disable=client-method-has-more-than-5-positional-arguments
+    def create_dev_container_json(
+        self,
+        azureml_container,  # pylint: disable=unused-argument
+        endpoint_name: str,  # pylint: disable=unused-argument
+        deployment_name: str,  # pylint: disable=unused-argument
+        build_directory: str,
+        image_name: str,
+        environment: dict,
+        volumes: list,
+        labels: dict,
+    ) -> str:
+        """Write a devcontainer.json for the deployment image into build_directory and return its recorded path."""
+        devcontainer = DevContainerResolver(
+            image=image_name,
+            environment=environment,
+            mounts=volumes,  # type: ignore[arg-type]
+            labels=labels,
+        )
+        devcontainer.write_file(build_directory)
+        return str(devcontainer.local_path)
+
+    def invoke_dev_container(self, devcontainer_path: str, app_path: str) -> None:
+        """Launch VS Code attached to the dev container via a vscode-remote folder URI."""
+        hex_encoded_devcontainer_path = _encode_hex(devcontainer_path)
+        command = [
+            "code",
+            "--folder-uri",
+            f"vscode-remote://dev-container+{hex_encoded_devcontainer_path}{app_path}",
+        ]
+        try:
+            run_cli_command(command)
+        except Exception as e:
+            # pylint: disable=no-member
+            # Assumes the failure carries captured CLI output (CalledProcessError-style)
+            # — TODO confirm run_cli_command's failure contract.
+            output = e.output.decode(encoding="UTF-8")  # type: ignore[attr-defined]
+            raise VSCodeCommandNotFound(output) from e
+
+
+def _encode_hex(path: str):
+    vscode_path = re.sub("\\s+", "", path)
+    return binascii.hexlify(vscode_path.encode()).decode("ascii")