author     S. Solomon Darnell  2025-03-28 21:52:21 -0500
committer  S. Solomon Darnell  2025-03-28 21:52:21 -0500
commit     4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
tree       ee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_data_import
parent     cc961e04ba734dd72309fb548a2f97d67d578813 (diff)
two versions of R2R are here
Diffstat (limited to '.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_data_import')
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_data_import/__init__.py       5
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_data_import/data_import.py  130
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/ai/ml/entities/_data_import/schedule.py     115
3 files changed, 250 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_data_import/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_data_import/__init__.py
new file mode 100644
index 00000000..fdf8caba
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_data_import/__init__.py
@@ -0,0 +1,5 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)
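Note: the pkgutil.extend_path boilerplate above makes _data_import a namespace-style package, so the same dotted package name can be assembled from several directories on sys.path. A minimal sketch of the effect, using hypothetical install roots and a hypothetical package name:

    import sys

    # Hypothetical: two roots that each ship part of the same package,
    # and whose __init__.py files contain the extend_path line above.
    sys.path[:0] = ["/opt/vendor_a", "/opt/vendor_b"]

    import ns_pkg  # hypothetical namespace-style package

    # __path__ now lists the matching directory under every root, so
    # submodules shipped by either root import under one package name.
    print(ns_pkg.__path__)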
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_data_import/data_import.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_data_import/data_import.py
new file mode 100644
index 00000000..028d431c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_data_import/data_import.py
@@ -0,0 +1,130 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+from os import PathLike
+from pathlib import Path
+from typing import Any, Dict, Optional, Union
+
+from azure.ai.ml._restclient.v2023_06_01_preview.models import DatabaseSource as RestDatabaseSource
+from azure.ai.ml._restclient.v2023_06_01_preview.models import DataImport as RestDataImport
+from azure.ai.ml._restclient.v2023_06_01_preview.models import FileSystemSource as RestFileSystemSource
+from azure.ai.ml._schema import DataImportSchema
+from azure.ai.ml._utils._experimental import experimental
+from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY, PARAMS_OVERRIDE_KEY, AssetTypes
+from azure.ai.ml.data_transfer import Database, FileSystem
+from azure.ai.ml.entities._assets import Data
+from azure.ai.ml.entities._util import load_from_dict
+
+
+@experimental
+class DataImport(Data):
+    """Data asset with a creating data import job.
+
+    :param name: Name of the asset.
+    :type name: str
+    :param path: The path to the asset created by the data import job.
+    :type path: str
+    :param source: The source the asset data is copied from.
+    :type source: Union[Database, FileSystem]
+    :param version: Version of the resource.
+    :type version: str
+    :param description: Description of the resource.
+    :type description: str
+    :param tags: Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param properties: The asset property dictionary.
+    :type properties: dict[str, str]
+    :param kwargs: A dictionary of additional configuration parameters.
+    :type kwargs: dict
+    """
+
+    def __init__(
+        self,
+        *,
+        name: str,
+        path: str,
+        source: Union[Database, FileSystem],
+        version: Optional[str] = None,
+        description: Optional[str] = None,
+        tags: Optional[Dict] = None,
+        properties: Optional[Dict] = None,
+        **kwargs: Any,
+    ):
+        super().__init__(
+            name=name,
+            version=version,
+            description=description,
+            tags=tags,
+            properties=properties,
+            path=path,
+            **kwargs,
+        )
+        self.source = source
+
+    @classmethod
+    def _load(
+        cls,
+        data: Optional[Dict] = None,
+        yaml_path: Optional[Union[PathLike, str]] = None,
+        params_override: Optional[list] = None,
+        **kwargs: Any,
+    ) -> "DataImport":
+        data = data or {}
+        params_override = params_override or []
+        context = {
+            BASE_PATH_CONTEXT_KEY: Path(yaml_path).parent if yaml_path else Path("./"),
+            PARAMS_OVERRIDE_KEY: params_override,
+        }
+        res: DataImport = load_from_dict(DataImportSchema, data, context, **kwargs)
+        return res
+
+    def _to_rest_object(self) -> RestDataImport:
+        if isinstance(self.source, Database):
+            source = RestDatabaseSource(
+                connection=self.source.connection,
+                query=self.source.query,
+            )
+        else:
+            source = RestFileSystemSource(
+                connection=self.source.connection,
+                path=self.source.path,
+            )
+
+        return RestDataImport(
+            description=self.description,
+            properties=self.properties,
+            tags=self.tags,
+            data_type=self.type,
+            data_uri=self.path,
+            asset_name=self.name,
+            source=source,
+        )
+
+    @classmethod
+    def _from_rest_object(cls, data_rest_object: RestDataImport) -> "DataImport":
+        source: Any = None
+        if isinstance(data_rest_object.source, RestDatabaseSource):
+            source = Database(
+                connection=data_rest_object.source.connection,
+                query=data_rest_object.source.query,
+            )
+            data_type = AssetTypes.MLTABLE
+        else:
+            source = FileSystem(
+                connection=data_rest_object.source.connection,
+                path=data_rest_object.source.path,
+            )
+            data_type = AssetTypes.URI_FOLDER
+
+        data_import = cls(
+            name=data_rest_object.asset_name,
+            path=data_rest_object.data_uri,
+            source=source,
+            description=data_rest_object.description,
+            tags=data_rest_object.tags,
+            properties=data_rest_object.properties,
+            type=data_type,
+            is_anonymous=data_rest_object.is_anonymous,
+        )
+        return data_import
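For orientation, a minimal usage sketch of the class added above. The imports mirror the ones in this file; the asset name, datastore path, connection, and query are hypothetical:

    from azure.ai.ml.data_transfer import Database
    from azure.ai.ml.entities._data_import.data_import import DataImport

    # Define an import of a database query result into a data asset.
    data_import = DataImport(
        name="my_imported_table",  # hypothetical asset name
        path="azureml://datastores/workspaceblobstore/paths/imports/",  # hypothetical path
        source=Database(
            connection="azureml:my_database_connection",  # hypothetical connection
            query="SELECT * FROM my_table",               # hypothetical query
        ),
    )

    # _to_rest_object picks RestDatabaseSource because source is a Database.
    rest_obj = data_import._to_rest_object()
    print(rest_obj.asset_name, rest_obj.data_uri)

Note the asymmetry visible in _from_rest_object: database-backed imports come back typed as AssetTypes.MLTABLE, file-system-backed imports as AssetTypes.URI_FOLDER.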
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_data_import/schedule.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_data_import/schedule.py
new file mode 100644
index 00000000..6a51878a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/entities/_data_import/schedule.py
@@ -0,0 +1,115 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+# pylint: disable=protected-access
+from os import PathLike
+from pathlib import Path
+from typing import Any, Dict, Optional, Union
+
+from azure.ai.ml._restclient.v2023_04_01_preview.models import ImportDataAction
+from azure.ai.ml._restclient.v2023_04_01_preview.models import Schedule as RestSchedule
+from azure.ai.ml._restclient.v2023_04_01_preview.models import ScheduleProperties
+from azure.ai.ml._schema._data_import.schedule import ImportDataScheduleSchema
+from azure.ai.ml._utils._experimental import experimental
+from azure.ai.ml.constants._common import BASE_PATH_CONTEXT_KEY, PARAMS_OVERRIDE_KEY, ScheduleType
+from azure.ai.ml.entities._data_import.data_import import DataImport
+from azure.ai.ml.entities._schedule.schedule import Schedule
+from azure.ai.ml.entities._schedule.trigger import CronTrigger, RecurrenceTrigger, TriggerBase
+from azure.ai.ml.entities._system_data import SystemData
+from azure.ai.ml.entities._util import load_from_dict
+
+
+@experimental
+class ImportDataSchedule(Schedule):
+    """ImportDataSchedule object.
+
+    :param name: Name of the schedule.
+    :type name: str
+    :param trigger: Trigger of the schedule.
+    :type trigger: Union[CronTrigger, RecurrenceTrigger]
+    :param import_data: The data import definition that the schedule runs.
+    :type import_data: DataImport
+    :param display_name: Display name of the schedule.
+    :type display_name: str
+    :param description: Description of the schedule. Defaults to None.
+    :type description: str
+    :param tags: Tag dictionary. Tags can be added, removed, and updated.
+    :type tags: dict[str, str]
+    :param properties: The data import property dictionary.
+    :type properties: dict[str, str]
+    """
+
+    def __init__(
+        self,
+        *,
+        name: str,
+        trigger: Optional[Union[CronTrigger, RecurrenceTrigger]],
+        import_data: DataImport,
+        display_name: Optional[str] = None,
+        description: Optional[str] = None,
+        tags: Optional[Dict] = None,
+        properties: Optional[Dict] = None,
+        **kwargs: Any,
+    ):
+        super().__init__(
+            name=name,
+            trigger=trigger,
+            display_name=display_name,
+            description=description,
+            tags=tags,
+            properties=properties,
+            **kwargs,
+        )
+        self.import_data = import_data
+        self._type = ScheduleType.DATA_IMPORT
+
+    @classmethod
+    def _load(
+        cls,
+        data: Optional[Dict] = None,
+        yaml_path: Optional[Union[PathLike, str]] = None,
+        params_override: Optional[list] = None,
+        **kwargs: Any,
+    ) -> "ImportDataSchedule":
+        data = data or {}
+        params_override = params_override or []
+        context = {
+            BASE_PATH_CONTEXT_KEY: Path(yaml_path).parent if yaml_path else Path("./"),
+            PARAMS_OVERRIDE_KEY: params_override,
+        }
+        return ImportDataSchedule(
+            base_path=context[BASE_PATH_CONTEXT_KEY],
+            **load_from_dict(ImportDataScheduleSchema, data, context, **kwargs),
+        )
+
+    @classmethod
+    def _create_schema_for_validation(cls, context: Any) -> ImportDataScheduleSchema:
+        return ImportDataScheduleSchema(context=context)
+
+    @classmethod
+    def _from_rest_object(cls, obj: RestSchedule) -> "ImportDataSchedule":
+        return cls(
+            trigger=TriggerBase._from_rest_object(obj.properties.trigger),
+            import_data=DataImport._from_rest_object(obj.properties.action.data_import_definition),
+            name=obj.name,
+            display_name=obj.properties.display_name,
+            description=obj.properties.description,
+            tags=obj.properties.tags,
+            properties=obj.properties.properties,
+            provisioning_state=obj.properties.provisioning_state,
+            is_enabled=obj.properties.is_enabled,
+            creation_context=SystemData._from_rest_object(obj.system_data),
+        )
+
+    def _to_rest_object(self) -> RestSchedule:
+        return RestSchedule(
+            properties=ScheduleProperties(
+                description=self.description,
+                properties=self.properties,
+                tags=self.tags,
+                action=ImportDataAction(data_import_definition=self.import_data._to_rest_object()),
+                display_name=self.display_name,
+                is_enabled=self._is_enabled,
+                trigger=self.trigger._to_rest_object() if self.trigger is not None else None,
+            )
+        )
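And a matching sketch for scheduling the import. The trigger import mirrors line 19 of this file; the schedule name and cron expression are hypothetical, and data_import is the object from the previous sketch:

    from azure.ai.ml.entities._schedule.trigger import CronTrigger
    from azure.ai.ml.entities._data_import.schedule import ImportDataSchedule

    # Run the data import every day at 02:00.
    schedule = ImportDataSchedule(
        name="nightly_table_import",  # hypothetical schedule name
        trigger=CronTrigger(expression="0 2 * * *"),
        import_data=data_import,  # DataImport from the previous sketch
    )

    # Wraps the import in an ImportDataAction, per _to_rest_object above.
    rest_schedule = schedule._to_rest_object()

Both classes are marked @experimental, so constructor shapes may change between preview API versions (this diff pins v2023_04_01_preview for schedules and v2023_06_01_preview for data import).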