| author | S. Solomon Darnell | 2025-03-28 21:52:21 -0500 |
|---|---|---|
| committer | S. Solomon Darnell | 2025-03-28 21:52:21 -0500 |
| commit | 4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch) | |
| tree | ee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore | |
| parent | cc961e04ba734dd72309fb548a2f97d67d578813 (diff) | |
| download | gn-ai-master.tar.gz | |
Diffstat (limited to '.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore')
7 files changed, 409 insertions, 0 deletions
```diff
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore/__init__.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore/__init__.py
new file mode 100644
index 00000000..18774380
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore/__init__.py
@@ -0,0 +1,30 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
+
+from .adls_gen1 import AzureDataLakeGen1Schema
+from .azure_storage import AzureBlobSchema, AzureDataLakeGen2Schema, AzureFileSchema, AzureStorageSchema
+from .credentials import (
+    AccountKeySchema,
+    BaseTenantCredentialSchema,
+    CertificateSchema,
+    NoneCredentialsSchema,
+    SasTokenSchema,
+    ServicePrincipalSchema,
+)
+
+__all__ = [
+    "AccountKeySchema",
+    "AzureBlobSchema",
+    "AzureDataLakeGen1Schema",
+    "AzureDataLakeGen2Schema",
+    "AzureFileSchema",
+    "AzureStorageSchema",
+    "BaseTenantCredentialSchema",
+    "CertificateSchema",
+    "NoneCredentialsSchema",
+    "SasTokenSchema",
+    "ServicePrincipalSchema",
+]
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore/_on_prem.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore/_on_prem.py
new file mode 100644
index 00000000..1f0a9710
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore/_on_prem.py
@@ -0,0 +1,40 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=unused-argument
+
+from typing import Any, Dict
+
+from marshmallow import fields, post_load
+
+from azure.ai.ml._restclient.v2022_10_01_preview.models import DatastoreType
+from azure.ai.ml._schema.core.fields import NestedField, PathAwareSchema, StringTransformedEnum, UnionField
+from azure.ai.ml._utils.utils import camel_to_snake
+
+from ._on_prem_credentials import KerberosKeytabSchema, KerberosPasswordSchema
+
+
+class HdfsSchema(PathAwareSchema):
+    name = fields.Str(required=True)
+    id = fields.Str(dump_only=True)
+    type = StringTransformedEnum(
+        allowed_values=DatastoreType.HDFS,
+        casing_transform=camel_to_snake,
+        required=True,
+    )
+    hdfs_server_certificate = fields.Str()
+    name_node_address = fields.Str(required=True)
+    protocol = fields.Str()
+    credentials = UnionField(
+        [NestedField(KerberosPasswordSchema), NestedField(KerberosKeytabSchema)],
+        required=True,
+    )
+    description = fields.Str()
+    tags = fields.Dict(keys=fields.Str(), values=fields.Dict())
+
+    @post_load
+    def make(self, data: Dict[str, Any], **kwargs) -> "HdfsDatastore":
+        from azure.ai.ml.entities._datastore._on_prem import HdfsDatastore
+
+        return HdfsDatastore(**data)
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore/_on_prem_credentials.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore/_on_prem_credentials.py
new file mode 100644
index 00000000..ada92afc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore/_on_prem_credentials.py
@@ -0,0 +1,53 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=unused-argument
+
+from typing import Dict
+
+from marshmallow import ValidationError, fields, post_load, pre_dump
+
+from azure.ai.ml._schema.core.schema import PatchedSchemaMeta
+
+
+class BaseKerberosCredentials(metaclass=PatchedSchemaMeta):
+    kerberos_realm = fields.Str(required=True)
+    kerberos_kdc_address = fields.Str(required=True)
+    kerberos_principal = fields.Str(required=True)
+
+
+class KerberosPasswordSchema(BaseKerberosCredentials):
+    kerberos_password = fields.Str(required=True)
+
+    @post_load
+    def make(self, data: Dict[str, str], **kwargs) -> "KerberosPasswordCredentials":
+        from azure.ai.ml.entities._datastore._on_prem_credentials import KerberosPasswordCredentials
+
+        return KerberosPasswordCredentials(**data)
+
+    @pre_dump
+    def predump(self, data, **kwargs):
+        from azure.ai.ml.entities._datastore._on_prem_credentials import KerberosPasswordCredentials
+
+        if not isinstance(data, KerberosPasswordCredentials):
+            raise ValidationError("Cannot dump non-KerberosPasswordCredentials object into KerberosPasswordCredentials")
+        return data
+
+
+class KerberosKeytabSchema(BaseKerberosCredentials):
+    kerberos_keytab = fields.Str(required=True)
+
+    @post_load
+    def make(self, data: Dict[str, str], **kwargs) -> "KerberosKeytabCredentials":
+        from azure.ai.ml.entities._datastore._on_prem_credentials import KerberosKeytabCredentials
+
+        return KerberosKeytabCredentials(**data)
+
+    @pre_dump
+    def predump(self, data, **kwargs):
+        from azure.ai.ml.entities._datastore._on_prem_credentials import KerberosKeytabCredentials
+
+        if not isinstance(data, KerberosKeytabCredentials):
+            raise ValidationError("Cannot dump non-KerberosKeytabCredentials object into KerberosKeytabCredentials")
+        return data
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore/adls_gen1.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore/adls_gen1.py
new file mode 100644
index 00000000..7a575fc6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore/adls_gen1.py
@@ -0,0 +1,41 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=unused-argument
+
+from typing import Any, Dict
+
+from marshmallow import fields, post_load
+
+from azure.ai.ml._restclient.v2022_10_01.models import DatastoreType
+from azure.ai.ml._schema.core.fields import NestedField, PathAwareSchema, StringTransformedEnum, UnionField
+from azure.ai.ml._utils.utils import camel_to_snake
+
+from .credentials import CertificateSchema, NoneCredentialsSchema, ServicePrincipalSchema
+
+
+class AzureDataLakeGen1Schema(PathAwareSchema):
+    name = fields.Str(required=True)
+    id = fields.Str(dump_only=True)
+    type = StringTransformedEnum(
+        allowed_values=DatastoreType.AZURE_DATA_LAKE_GEN1,
+        casing_transform=camel_to_snake,
+        required=True,
+    )
+    store_name = fields.Str(required=True)
+    credentials = UnionField(
+        [
+            NestedField(ServicePrincipalSchema),
+            NestedField(CertificateSchema),
+            NestedField(NoneCredentialsSchema),
+        ]
+    )
+    description = fields.Str()
+    tags = fields.Dict(keys=fields.Str(), values=fields.Dict())
+
+    @post_load
+    def make(self, data: Dict[str, Any], **kwargs) -> "AzureDataLakeGen1Datastore":
+        from azure.ai.ml.entities import AzureDataLakeGen1Datastore
+
+        return AzureDataLakeGen1Datastore(**data)
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore/azure_storage.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore/azure_storage.py
new file mode 100644
index 00000000..ffe8c61c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore/azure_storage.py
@@ -0,0 +1,97 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=unused-argument
+
+from typing import Any, Dict
+
+from marshmallow import fields, post_load
+
+from azure.ai.ml._restclient.v2022_10_01.models import DatastoreType
+from azure.ai.ml._schema.core.fields import NestedField, PathAwareSchema, StringTransformedEnum, UnionField
+from azure.ai.ml._utils.utils import camel_to_snake
+
+from .credentials import (
+    AccountKeySchema,
+    CertificateSchema,
+    NoneCredentialsSchema,
+    SasTokenSchema,
+    ServicePrincipalSchema,
+)
+
+
+class AzureStorageSchema(PathAwareSchema):
+    name = fields.Str(required=True)
+    id = fields.Str(dump_only=True)
+    account_name = fields.Str(required=True)
+    endpoint = fields.Str()
+    protocol = fields.Str()
+    description = fields.Str()
+    tags = fields.Dict(keys=fields.Str(), values=fields.Str())
+
+
+class AzureFileSchema(AzureStorageSchema):
+    type = StringTransformedEnum(
+        allowed_values=DatastoreType.AZURE_FILE,
+        casing_transform=camel_to_snake,
+        required=True,
+    )
+    file_share_name = fields.Str(required=True)
+    credentials = UnionField(
+        [
+            NestedField(AccountKeySchema),
+            NestedField(SasTokenSchema),
+            NestedField(NoneCredentialsSchema),
+        ]
+    )
+
+    @post_load
+    def make(self, data: Dict[str, Any], **kwargs) -> "AzureFileDatastore":  # type: ignore[name-defined]
+        from azure.ai.ml.entities import AzureFileDatastore
+
+        return AzureFileDatastore(**data)
+
+
+class AzureBlobSchema(AzureStorageSchema):
+    type = StringTransformedEnum(
+        allowed_values=DatastoreType.AZURE_BLOB,
+        casing_transform=camel_to_snake,
+        required=True,
+    )
+    container_name = fields.Str(required=True)
+    credentials = UnionField(
+        [
+            NestedField(AccountKeySchema),
+            NestedField(SasTokenSchema),
+            NestedField(NoneCredentialsSchema),
+        ],
+    )
+
+    @post_load
+    def make(self, data: Dict[str, Any], **kwargs) -> "AzureBlobDatastore":  # type: ignore[name-defined]
+        from azure.ai.ml.entities import AzureBlobDatastore
+
+        return AzureBlobDatastore(**data)
+
+
+class AzureDataLakeGen2Schema(AzureStorageSchema):
+    type = StringTransformedEnum(
+        allowed_values=DatastoreType.AZURE_DATA_LAKE_GEN2,
+        casing_transform=camel_to_snake,
+        required=True,
+    )
+    filesystem = fields.Str(required=True)
+    credentials = UnionField(
+        [
+            NestedField(ServicePrincipalSchema),
+            NestedField(CertificateSchema),
+            NestedField(NoneCredentialsSchema),
+        ]
+    )
+
+    @post_load
+    def make(self, data: Dict[str, Any], **kwargs) -> "AzureDataLakeGen2Datastore":
+        from azure.ai.ml.entities import AzureDataLakeGen2Datastore
+
+        return AzureDataLakeGen2Datastore(**data)
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore/credentials.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore/credentials.py
new file mode 100644
index 00000000..a4b46aa0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore/credentials.py
@@ -0,0 +1,99 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=unused-argument
+
+from typing import Any, Dict
+
+from marshmallow import ValidationError, fields, post_load, pre_dump, pre_load
+
+from azure.ai.ml._schema.core.schema import PatchedSchemaMeta
+from azure.ai.ml.entities._credentials import (
+    AccountKeyConfiguration,
+    CertificateConfiguration,
+    NoneCredentialConfiguration,
+    SasTokenConfiguration,
+    ServicePrincipalConfiguration,
+)
+
+
+class NoneCredentialsSchema(metaclass=PatchedSchemaMeta):
+    @post_load
+    def make(self, data: Dict[str, str], **kwargs) -> NoneCredentialConfiguration:
+        return NoneCredentialConfiguration(**data)
+
+
+class AccountKeySchema(metaclass=PatchedSchemaMeta):
+    account_key = fields.Str(required=True)
+
+    @post_load
+    def make(self, data: Dict[str, str], **kwargs) -> AccountKeyConfiguration:
+        return AccountKeyConfiguration(**data)
+
+    @pre_dump
+    def predump(self, data, **kwargs):
+        if not isinstance(data, AccountKeyConfiguration):
+            raise ValidationError("Cannot dump non-AccountKeyCredentials object into AccountKeyCredentials")
+        return data
+
+
+class SasTokenSchema(metaclass=PatchedSchemaMeta):
+    sas_token = fields.Str(required=True)
+
+    @post_load
+    def make(self, data: Dict[str, str], **kwargs) -> SasTokenConfiguration:
+        return SasTokenConfiguration(**data)
+
+    @pre_dump
+    def predump(self, data, **kwargs):
+        if not isinstance(data, SasTokenConfiguration):
+            raise ValidationError("Cannot dump non-SasTokenCredentials object into SasTokenCredentials")
+        return data
+
+
+class BaseTenantCredentialSchema(metaclass=PatchedSchemaMeta):
+    authority_url = fields.Str()
+    resource_url = fields.Str()
+    tenant_id = fields.Str(required=True)
+    client_id = fields.Str(required=True)
+
+    @pre_load
+    def accept_backward_compatible_keys(self, data, **kwargs):
+        acceptable_keys = [key for key in data.keys() if key in ("authority_url", "authority_uri")]
+        if len(acceptable_keys) > 1:
+            raise ValidationError(
+                "Cannot specify both 'authority_url' and 'authority_uri'. Please use 'authority_url'."
+            )
+        if acceptable_keys:
+            data["authority_url"] = data.pop(acceptable_keys[0])
+        return data
+
+
+class ServicePrincipalSchema(BaseTenantCredentialSchema):
+    client_secret = fields.Str(required=True)
+
+    @post_load
+    def make(self, data: Dict[str, str], **kwargs) -> ServicePrincipalConfiguration:
+        return ServicePrincipalConfiguration(**data)
+
+    @pre_dump
+    def predump(self, data, **kwargs):
+        if not isinstance(data, ServicePrincipalConfiguration):
+            raise ValidationError("Cannot dump non-ServicePrincipalCredentials object into ServicePrincipalCredentials")
+        return data
+
+
+class CertificateSchema(BaseTenantCredentialSchema):
+    certificate = fields.Str()
+    thumbprint = fields.Str(required=True)
+
+    @post_load
+    def make(self, data: Dict[str, Any], **kwargs) -> CertificateConfiguration:
+        return CertificateConfiguration(**data)
+
+    @pre_dump
+    def predump(self, data, **kwargs):
+        if not isinstance(data, CertificateConfiguration):
+            raise ValidationError("Cannot dump non-CertificateCredentials object into CertificateCredentials")
+        return data
diff --git a/.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore/one_lake.py b/.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore/one_lake.py
new file mode 100644
index 00000000..4b5e7b66
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/ai/ml/_schema/_datastore/one_lake.py
@@ -0,0 +1,49 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# ---------------------------------------------------------
+
+# pylint: disable=unused-argument
+
+from typing import Any, Dict
+
+from marshmallow import Schema, fields, post_load
+
+from azure.ai.ml._restclient.v2023_04_01_preview.models import DatastoreType, OneLakeArtifactType
+from azure.ai.ml._schema.core.fields import NestedField, PathAwareSchema, StringTransformedEnum, UnionField
+from azure.ai.ml._utils.utils import camel_to_snake
+
+from .credentials import NoneCredentialsSchema, ServicePrincipalSchema
+
+
+class OneLakeArtifactSchema(Schema):
+    name = fields.Str(required=True)
+    type = StringTransformedEnum(allowed_values=OneLakeArtifactType.LAKE_HOUSE, casing_transform=camel_to_snake)
+
+
+class OneLakeSchema(PathAwareSchema):
+    name = fields.Str(required=True)
+    id = fields.Str(dump_only=True)
+    type = StringTransformedEnum(
+        allowed_values=DatastoreType.ONE_LAKE,
+        casing_transform=camel_to_snake,
+        required=True,
+    )
+    # required fields for OneLake
+    one_lake_workspace_name = fields.Str(required=True)
+    endpoint = fields.Str(required=True)
+    artifact = NestedField(OneLakeArtifactSchema)
+    # ServicePrincipal and UserIdentity are the two supported credential types
+    credentials = UnionField(
+        [
+            NestedField(ServicePrincipalSchema),
+            NestedField(NoneCredentialsSchema),
+        ]
+    )
+    description = fields.Str()
+    tags = fields.Dict(keys=fields.Str(), values=fields.Str())
+
+    @post_load
+    def make(self, data: Dict[str, Any], **kwargs) -> "OneLakeDatastore":
+        from azure.ai.ml.entities import OneLakeDatastore
+
+        return OneLakeDatastore(**data)
```
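The files above are vendored azure-ai-ml internals, but they all share one pattern: a marshmallow schema whose `@post_load` hook builds the corresponding SDK entity and whose `@pre_dump` hook refuses to serialize any other type. The snippet below is a minimal sketch, not part of the commit; it assumes `azure-ai-ml` (and its pinned `marshmallow`) is importable and that `PatchedSchemaMeta` wires `AccountKeySchema` up as an ordinary marshmallow schema, and the key value is a placeholder.

```python
# Minimal sketch (not from the diff): round-trip a credentials dict through
# AccountKeySchema from the package added above. Assumes azure-ai-ml is
# installed; the account key is a placeholder, not a real secret.
from azure.ai.ml._schema._datastore import AccountKeySchema
from azure.ai.ml.entities._credentials import AccountKeyConfiguration

schema = AccountKeySchema()

# @post_load converts the validated dict into the corresponding entity object.
creds = schema.load({"account_key": "placeholder-key"})
assert isinstance(creds, AccountKeyConfiguration)

# @pre_dump rejects anything that is not an AccountKeyConfiguration; dumping
# the entity serializes it back to a plain dict.
print(schema.dump(creds))  # expected: {'account_key': 'placeholder-key'}
```

The datastore schemas themselves (`AzureBlobSchema`, `HdfsSchema`, `OneLakeSchema`, ...) follow the same load/dump contract, but they derive from `PathAwareSchema` and so expect a base-path context when constructed, which makes them less convenient to exercise in isolation.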
