path: root/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated
Diffstat (limited to '.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated')
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/__init__.py | 29
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/_azure_file_storage.py | 130
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/_configuration.py | 77
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/_patch.py | 20
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/_serialization.py | 2050
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/__init__.py | 29
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/_azure_file_storage.py | 132
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/_configuration.py | 77
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/_patch.py | 20
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/__init__.py | 31
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/_directory_operations.py | 1056
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/_file_operations.py | 2518
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/_patch.py | 20
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/_service_operations.py | 284
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/_share_operations.py | 1765
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/models/__init__.py | 130
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/models/_azure_file_storage_enums.py | 222
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/models/_models_py3.py | 1711
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/models/_patch.py | 20
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/__init__.py | 31
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/_directory_operations.py | 1570
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/_file_operations.py | 3755
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/_patch.py | 20
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/_service_operations.py | 410
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/_share_operations.py | 2595
-rw-r--r--  .venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/py.typed | 1
26 files changed, 18703 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/__init__.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/__init__.py
new file mode 100644
index 00000000..b4f1dd31
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/__init__.py
@@ -0,0 +1,29 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._azure_file_storage import AzureFileStorage # type: ignore
+
+try:
+ from ._patch import __all__ as _patch_all
+ from ._patch import *
+except ImportError:
+ _patch_all = []
+from ._patch import patch_sdk as _patch_sdk
+
+__all__ = [
+ "AzureFileStorage",
+]
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
+
+_patch_sdk()
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/_azure_file_storage.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/_azure_file_storage.py
new file mode 100644
index 00000000..8a332771
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/_azure_file_storage.py
@@ -0,0 +1,130 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from copy import deepcopy
+from typing import Any, Optional, Union
+from typing_extensions import Self
+
+from azure.core import PipelineClient
+from azure.core.pipeline import policies
+from azure.core.rest import HttpRequest, HttpResponse
+
+from . import models as _models
+from ._configuration import AzureFileStorageConfiguration
+from ._serialization import Deserializer, Serializer
+from .operations import DirectoryOperations, FileOperations, ServiceOperations, ShareOperations
+
+
+class AzureFileStorage: # pylint: disable=client-accepts-api-version-keyword
+ """AzureFileStorage.
+
+ :ivar service: ServiceOperations operations
+ :vartype service: azure.storage.fileshare.operations.ServiceOperations
+ :ivar share: ShareOperations operations
+ :vartype share: azure.storage.fileshare.operations.ShareOperations
+ :ivar directory: DirectoryOperations operations
+ :vartype directory: azure.storage.fileshare.operations.DirectoryOperations
+ :ivar file: FileOperations operations
+ :vartype file: azure.storage.fileshare.operations.FileOperations
+ :param url: The URL of the service account, share, directory or file that is the target of the
+ desired operation. Required.
+ :type url: str
+ :param base_url: Service URL. Required. Default value is "".
+ :type base_url: str
+ :param file_request_intent: Valid value is "backup". Default value is None.
+ :type file_request_intent: str or ~azure.storage.fileshare.models.ShareTokenIntent
+ :param allow_trailing_dot: If true, the trailing dot will not be trimmed from the target URI.
+ Default value is None.
+ :type allow_trailing_dot: bool
+ :param allow_source_trailing_dot: If true, the trailing dot will not be trimmed from the source
+ URI. Default value is None.
+ :type allow_source_trailing_dot: bool
+ :keyword version: Specifies the version of the operation to use for this request. Default value
+ is "2025-05-05". Note that overriding this default value may result in unsupported behavior.
+ :paramtype version: str
+ :keyword file_range_write_from_url: Only "update" is supported: writes the bytes
+ downloaded from the source URL into the specified range. Default value is "update". Note that
+ overriding this default value may result in unsupported behavior.
+ :paramtype file_range_write_from_url: str
+ """
+
+ def __init__( # pylint: disable=missing-client-constructor-parameter-credential
+ self,
+ url: str,
+ base_url: str = "",
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ allow_source_trailing_dot: Optional[bool] = None,
+ **kwargs: Any
+ ) -> None:
+ self._config = AzureFileStorageConfiguration(
+ url=url,
+ file_request_intent=file_request_intent,
+ allow_trailing_dot=allow_trailing_dot,
+ allow_source_trailing_dot=allow_source_trailing_dot,
+ **kwargs
+ )
+ _policies = kwargs.pop("policies", None)
+ if _policies is None:
+ _policies = [
+ policies.RequestIdPolicy(**kwargs),
+ self._config.headers_policy,
+ self._config.user_agent_policy,
+ self._config.proxy_policy,
+ policies.ContentDecodePolicy(**kwargs),
+ self._config.redirect_policy,
+ self._config.retry_policy,
+ self._config.authentication_policy,
+ self._config.custom_hook_policy,
+ self._config.logging_policy,
+ policies.DistributedTracingPolicy(**kwargs),
+ policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None,
+ self._config.http_logging_policy,
+ ]
+ self._client: PipelineClient = PipelineClient(base_url=base_url, policies=_policies, **kwargs)
+
+ client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
+ self._serialize = Serializer(client_models)
+ self._deserialize = Deserializer(client_models)
+ self._serialize.client_side_validation = False
+ self.service = ServiceOperations(self._client, self._config, self._serialize, self._deserialize)
+ self.share = ShareOperations(self._client, self._config, self._serialize, self._deserialize)
+ self.directory = DirectoryOperations(self._client, self._config, self._serialize, self._deserialize)
+ self.file = FileOperations(self._client, self._config, self._serialize, self._deserialize)
+
+ def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse:
+ """Runs the network request through the client's chained policies.
+
+ >>> from azure.core.rest import HttpRequest
+ >>> request = HttpRequest("GET", "https://www.example.org/")
+ <HttpRequest [GET], url: 'https://www.example.org/'>
+ >>> response = client._send_request(request)
+ <HttpResponse: 200 OK>
+
+ For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request
+
+ :param request: The network request you want to make. Required.
+ :type request: ~azure.core.rest.HttpRequest
+ :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
+ :return: The response of your network call. Does not do error handling on your response.
+ :rtype: ~azure.core.rest.HttpResponse
+ """
+
+ request_copy = deepcopy(request)
+ request_copy.url = self._client.format_url(request_copy.url)
+ return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore
+
+ def close(self) -> None:
+ self._client.close()
+
+ def __enter__(self) -> Self:
+ self._client.__enter__()
+ return self
+
+ def __exit__(self, *exc_details: Any) -> None:
+ self._client.__exit__(*exc_details)
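
For orientation, a minimal sketch of how this generated client can be constructed and exercised directly, separate from the diff itself: the constructor takes the target URL plus optional trailing-dot and request-intent switches, and _send_request pushes a raw HttpRequest through the assembled pipeline. The account URL below is a placeholder; in normal use this class is wrapped by the public azure.storage.fileshare clients, and a live call would additionally need authentication and the x-ms-version header that the higher-level client supplies.

    from azure.core.rest import HttpRequest
    from azure.storage.fileshare._generated import AzureFileStorage

    account_url = "https://<account>.file.core.windows.net"  # placeholder account URL
    with AzureFileStorage(url=account_url, base_url=account_url) as client:
        # Build a raw request; _send_request resolves a relative URL against base_url
        # via format_url before sending it through the pipeline.
        request = HttpRequest("GET", "/?restype=service&comp=properties")
        response = client._send_request(request)  # no error handling, per the docstring above
        print(response.status_code)
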
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/_configuration.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/_configuration.py
new file mode 100644
index 00000000..6b42bcdf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/_configuration.py
@@ -0,0 +1,77 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, Literal, Optional, Union
+
+from azure.core.pipeline import policies
+
+from . import models as _models
+
+VERSION = "unknown"
+
+
+class AzureFileStorageConfiguration: # pylint: disable=too-many-instance-attributes
+ """Configuration for AzureFileStorage.
+
+ Note that all parameters used to create this instance are saved as instance
+ attributes.
+
+ :param url: The URL of the service account, share, directory or file that is the target of the
+ desired operation. Required.
+ :type url: str
+ :param file_request_intent: Valid value is "backup". Default value is None.
+ :type file_request_intent: str or ~azure.storage.fileshare.models.ShareTokenIntent
+ :param allow_trailing_dot: If true, the trailing dot will not be trimmed from the target URI.
+ Default value is None.
+ :type allow_trailing_dot: bool
+ :param allow_source_trailing_dot: If true, the trailing dot will not be trimmed from the source
+ URI. Default value is None.
+ :type allow_source_trailing_dot: bool
+ :keyword version: Specifies the version of the operation to use for this request. Default value
+ is "2025-05-05". Note that overriding this default value may result in unsupported behavior.
+ :paramtype version: str
+ :keyword file_range_write_from_url: Only "update" is supported: writes the bytes
+ downloaded from the source URL into the specified range. Default value is "update". Note that
+ overriding this default value may result in unsupported behavior.
+ :paramtype file_range_write_from_url: str
+ """
+
+ def __init__(
+ self,
+ url: str,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ allow_source_trailing_dot: Optional[bool] = None,
+ **kwargs: Any
+ ) -> None:
+ version: Literal["2025-05-05"] = kwargs.pop("version", "2025-05-05")
+ file_range_write_from_url: Literal["update"] = kwargs.pop("file_range_write_from_url", "update")
+
+ if url is None:
+ raise ValueError("Parameter 'url' must not be None.")
+
+ self.url = url
+ self.file_request_intent = file_request_intent
+ self.allow_trailing_dot = allow_trailing_dot
+ self.allow_source_trailing_dot = allow_source_trailing_dot
+ self.version = version
+ self.file_range_write_from_url = file_range_write_from_url
+ kwargs.setdefault("sdk_moniker", "azurefilestorage/{}".format(VERSION))
+ self.polling_interval = kwargs.get("polling_interval", 30)
+ self._configure(**kwargs)
+
+ def _configure(self, **kwargs: Any) -> None:
+ self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs)
+ self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs)
+ self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
+ self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
+ self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs)
+ self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
+ self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs)
+ self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)
+ self.authentication_policy = kwargs.get("authentication_policy")
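
Because _configure only falls back to the default policies when the corresponding kwarg is absent, individual pipeline policies can be overridden at client construction time. A small sketch under that assumption, using a tighter retry budget; the account URL is again a placeholder.

    from azure.core.pipeline.policies import RetryPolicy
    from azure.storage.fileshare._generated import AzureFileStorage

    account_url = "https://<account>.file.core.windows.net"  # placeholder account URL
    client = AzureFileStorage(
        url=account_url,
        base_url=account_url,
        retry_policy=RetryPolicy(retry_total=3),  # picked up by _configure instead of the default
    )
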
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/_patch.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/_patch.py
new file mode 100644
index 00000000..f7dd3251
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/_patch.py
@@ -0,0 +1,20 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+from typing import List
+
+__all__: List[str] = [] # Add all objects you want publicly available to users at this package level
+
+
+def patch_sdk():
+ """Do not remove from this file.
+
+ `patch_sdk` is a last resort escape hatch that allows you to do customizations
+ you can't accomplish using the techniques described in
+ https://aka.ms/azsdk/python/dpcodegen/python/customize
+ """
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/_serialization.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/_serialization.py
new file mode 100644
index 00000000..a066e16a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/_serialization.py
@@ -0,0 +1,2050 @@
+# pylint: disable=too-many-lines
+# --------------------------------------------------------------------------
+#
+# Copyright (c) Microsoft Corporation. All rights reserved.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the ""Software""), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# --------------------------------------------------------------------------
+
+# pyright: reportUnnecessaryTypeIgnoreComment=false
+
+from base64 import b64decode, b64encode
+import calendar
+import datetime
+import decimal
+import email
+from enum import Enum
+import json
+import logging
+import re
+import sys
+import codecs
+from typing import (
+ Dict,
+ Any,
+ cast,
+ Optional,
+ Union,
+ AnyStr,
+ IO,
+ Mapping,
+ Callable,
+ MutableMapping,
+ List,
+)
+
+try:
+ from urllib import quote # type: ignore
+except ImportError:
+ from urllib.parse import quote
+import xml.etree.ElementTree as ET
+
+import isodate # type: ignore
+from typing_extensions import Self
+
+from azure.core.exceptions import DeserializationError, SerializationError
+from azure.core.serialization import NULL as CoreNull
+
+_BOM = codecs.BOM_UTF8.decode(encoding="utf-8")
+
+JSON = MutableMapping[str, Any]
+
+
+class RawDeserializer:
+
+ # Accept "text" because we're open minded people...
+ JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$")
+
+ # Name used in context
+ CONTEXT_NAME = "deserialized_data"
+
+ @classmethod
+ def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any:
+ """Decode data according to content-type.
+
+ A stream of data is accepted as well, but it will be loaded into memory at once for now.
+
+ If no content-type is given, the string version is returned (not bytes, not a stream).
+
+ :param data: Input, could be bytes or stream (will be decoded with UTF8) or text
+ :type data: str or bytes or IO
+ :param str content_type: The content type.
+ :return: The deserialized data.
+ :rtype: object
+ """
+ if hasattr(data, "read"):
+ # Assume a stream
+ data = cast(IO, data).read()
+
+ if isinstance(data, bytes):
+ data_as_str = data.decode(encoding="utf-8-sig")
+ else:
+ # Explain to mypy the correct type.
+ data_as_str = cast(str, data)
+
+ # Remove Byte Order Mark if present in string
+ data_as_str = data_as_str.lstrip(_BOM)
+
+ if content_type is None:
+ return data
+
+ if cls.JSON_REGEXP.match(content_type):
+ try:
+ return json.loads(data_as_str)
+ except ValueError as err:
+ raise DeserializationError("JSON is invalid: {}".format(err), err) from err
+ elif "xml" in (content_type or []):
+ try:
+
+ try:
+ if isinstance(data, unicode): # type: ignore
+ # On Python 2.7, ElementTree's "fromstring" will scream if given a unicode string
+ data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore
+ except NameError:
+ pass
+
+ return ET.fromstring(data_as_str) # nosec
+ except ET.ParseError as err:
+ # It might be because the server has an issue, and returned JSON with
+ # content-type XML....
+ # So let's try a JSON load, and if it's still broken
+ # let's flow the initial exception
+ def _json_attemp(data):
+ try:
+ return True, json.loads(data)
+ except ValueError:
+ return False, None # Don't care about this one
+
+ success, json_result = _json_attemp(data)
+ if success:
+ return json_result
+ # If I'm here, it's not JSON, it's not XML, let's scream
+ # and raise the last context in this block (the XML exception)
+ # The function hack is because Py2.7 messes up with exception
+ # context otherwise.
+ _LOGGER.critical("Wasn't XML not JSON, failing")
+ raise DeserializationError("XML is invalid") from err
+ elif content_type.startswith("text/"):
+ return data_as_str
+ raise DeserializationError("Cannot deserialize content-type: {}".format(content_type))
+
+ @classmethod
+ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any:
+ """Deserialize from HTTP response.
+
+ Use bytes and headers to NOT use any requests/aiohttp or whatever
+ specific implementation.
+ Headers will be tested for "content-type".
+
+ :param bytes body_bytes: The body of the response.
+ :param dict headers: The headers of the response.
+ :returns: The deserialized data.
+ :rtype: object
+ """
+ # Try to use content-type from headers if available
+ content_type = None
+ if "content-type" in headers:
+ content_type = headers["content-type"].split(";")[0].strip().lower()
+ # Ouch, this server did not declare what it sent...
+ # Let's guess it's JSON...
+ # Also, since Autorest was considering that an empty body was a valid JSON,
+ # need that test as well....
+ else:
+ content_type = "application/json"
+
+ if body_bytes:
+ return cls.deserialize_from_text(body_bytes, content_type)
+ return None
+
+
+_LOGGER = logging.getLogger(__name__)
+
+try:
+ _long_type = long # type: ignore
+except NameError:
+ _long_type = int
+
+TZ_UTC = datetime.timezone.utc
+
+_FLATTEN = re.compile(r"(?<!\\)\.")
+
+
+def attribute_transformer(key, attr_desc, value): # pylint: disable=unused-argument
+ """A key transformer that returns the Python attribute.
+
+ :param str key: The attribute name
+ :param dict attr_desc: The attribute metadata
+ :param object value: The value
+ :returns: A key using attribute name
+ :rtype: str
+ """
+ return (key, value)
+
+
+def full_restapi_key_transformer(key, attr_desc, value): # pylint: disable=unused-argument
+ """A key transformer that returns the full RestAPI key path.
+
+ :param str key: The attribute name
+ :param dict attr_desc: The attribute metadata
+ :param object value: The value
+ :returns: A list of keys using RestAPI syntax.
+ :rtype: list
+ """
+ keys = _FLATTEN.split(attr_desc["key"])
+ return ([_decode_attribute_map_key(k) for k in keys], value)
+
+
+def last_restapi_key_transformer(key, attr_desc, value):
+ """A key transformer that returns the last RestAPI key.
+
+ :param str key: The attribute name
+ :param dict attr_desc: The attribute metadata
+ :param object value: The value
+ :returns: The last RestAPI key.
+ :rtype: str
+ """
+ key, value = full_restapi_key_transformer(key, attr_desc, value)
+ return (key[-1], value)
+
+
+def _create_xml_node(tag, prefix=None, ns=None):
+ """Create a XML node.
+
+ :param str tag: The tag name
+ :param str prefix: The prefix
+ :param str ns: The namespace
+ :return: The XML node
+ :rtype: xml.etree.ElementTree.Element
+ """
+ if prefix and ns:
+ ET.register_namespace(prefix, ns)
+ if ns:
+ return ET.Element("{" + ns + "}" + tag)
+ return ET.Element(tag)
+
+
+class Model:
+ """Mixin for all client request body/response body models to support
+ serialization and deserialization.
+ """
+
+ _subtype_map: Dict[str, Dict[str, Any]] = {}
+ _attribute_map: Dict[str, Dict[str, Any]] = {}
+ _validation: Dict[str, Dict[str, Any]] = {}
+
+ def __init__(self, **kwargs: Any) -> None:
+ self.additional_properties: Optional[Dict[str, Any]] = {}
+ for k in kwargs: # pylint: disable=consider-using-dict-items
+ if k not in self._attribute_map:
+ _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
+ elif k in self._validation and self._validation[k].get("readonly", False):
+ _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__)
+ else:
+ setattr(self, k, kwargs[k])
+
+ def __eq__(self, other: Any) -> bool:
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are equal
+ :rtype: bool
+ """
+ if isinstance(other, self.__class__):
+ return self.__dict__ == other.__dict__
+ return False
+
+ def __ne__(self, other: Any) -> bool:
+ """Compare objects by comparing all attributes.
+
+ :param object other: The object to compare
+ :returns: True if objects are not equal
+ :rtype: bool
+ """
+ return not self.__eq__(other)
+
+ def __str__(self) -> str:
+ return str(self.__dict__)
+
+ @classmethod
+ def enable_additional_properties_sending(cls) -> None:
+ cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"}
+
+ @classmethod
+ def is_xml_model(cls) -> bool:
+ try:
+ cls._xml_map # type: ignore
+ except AttributeError:
+ return False
+ return True
+
+ @classmethod
+ def _create_xml_node(cls):
+ """Create XML node.
+
+ :returns: The XML node
+ :rtype: xml.etree.ElementTree.Element
+ """
+ try:
+ xml_map = cls._xml_map # type: ignore
+ except AttributeError:
+ xml_map = {}
+
+ return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None))
+
+ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
+ """Return the JSON that would be sent to server from this model.
+
+ This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`.
+
+ If you want XML serialization, you can pass the kwargs is_xml=True.
+
+ :param bool keep_readonly: If you want to serialize the readonly attributes
+ :returns: A dict JSON compatible object
+ :rtype: dict
+ """
+ serializer = Serializer(self._infer_class_models())
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, keep_readonly=keep_readonly, **kwargs
+ )
+
+ def as_dict(
+ self,
+ keep_readonly: bool = True,
+ key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer,
+ **kwargs: Any
+ ) -> JSON:
+ """Return a dict that can be serialized using json.dump.
+
+ Advanced usage might optionally use a callback as parameter:
+
+ .. code:: python
+
+ def my_key_transformer(key, attr_desc, value):
+ return key
+
+ Key is the attribute name used in Python. Attr_desc
+ is a dict of metadata. Currently contains 'type' with the
+ msrest type and 'key' with the RestAPI encoded key.
+ Value is the current value in this object.
+
+ The string returned will be used to serialize the key.
+ If the return type is a list, this is considered hierarchical
+ result dict.
+
+ See the three examples in this file:
+
+ - attribute_transformer
+ - full_restapi_key_transformer
+ - last_restapi_key_transformer
+
+ If you want XML serialization, you can pass the kwargs is_xml=True.
+
+ :param bool keep_readonly: If you want to serialize the readonly attributes
+ :param function key_transformer: A key transformer function.
+ :returns: A dict JSON compatible object
+ :rtype: dict
+ """
+ serializer = Serializer(self._infer_class_models())
+ return serializer._serialize( # type: ignore # pylint: disable=protected-access
+ self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
+ )
+
+ @classmethod
+ def _infer_class_models(cls):
+ try:
+ str_models = cls.__module__.rsplit(".", 1)[0]
+ models = sys.modules[str_models]
+ client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+ if cls.__name__ not in client_models:
+ raise ValueError("Not Autorest generated code")
+ except Exception: # pylint: disable=broad-exception-caught
+ # Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
+ client_models = {cls.__name__: cls}
+ return client_models
+
+ @classmethod
+ def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self:
+ """Parse a str using the RestAPI syntax and return a model.
+
+ :param str data: A str using RestAPI structure. JSON by default.
+ :param str content_type: JSON by default, set application/xml if XML.
+ :returns: An instance of this model
+ :raises DeserializationError: if something went wrong
+ :rtype: Self
+ """
+ deserializer = Deserializer(cls._infer_class_models())
+ return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
+
+ @classmethod
+ def from_dict(
+ cls,
+ data: Any,
+ key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None,
+ content_type: Optional[str] = None,
+ ) -> Self:
+ """Parse a dict using given key extractor return a model.
+
+ By default consider key
+ extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor
+ and last_rest_key_case_insensitive_extractor)
+
+ :param dict data: A dict using RestAPI structure
+ :param function key_extractors: A key extractor function.
+ :param str content_type: JSON by default, set application/xml if XML.
+ :returns: An instance of this model
+ :raises DeserializationError: if something went wrong
+ :rtype: Self
+ """
+ deserializer = Deserializer(cls._infer_class_models())
+ deserializer.key_extractors = ( # type: ignore
+ [ # type: ignore
+ attribute_key_case_insensitive_extractor,
+ rest_key_case_insensitive_extractor,
+ last_rest_key_case_insensitive_extractor,
+ ]
+ if key_extractors is None
+ else key_extractors
+ )
+ return deserializer(cls.__name__, data, content_type=content_type) # type: ignore
+
+ @classmethod
+ def _flatten_subtype(cls, key, objects):
+ if "_subtype_map" not in cls.__dict__:
+ return {}
+ result = dict(cls._subtype_map[key])
+ for valuetype in cls._subtype_map[key].values():
+ result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access
+ return result
+
+ @classmethod
+ def _classify(cls, response, objects):
+ """Check the class _subtype_map for any child classes.
+ We want to ignore any inherited _subtype_maps.
+
+ :param dict response: The initial data
+ :param dict objects: The class objects
+ :returns: The class to be used
+ :rtype: class
+ """
+ for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
+ subtype_value = None
+
+ if not isinstance(response, ET.Element):
+ rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
+ subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None)
+ else:
+ subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
+ if subtype_value:
+ # Try to match base class. Can be class name only
+ # (bug to fix in Autorest to support x-ms-discriminator-name)
+ if cls.__name__ == subtype_value:
+ return cls
+ flatten_mapping_type = cls._flatten_subtype(subtype_key, objects)
+ try:
+ return objects[flatten_mapping_type[subtype_value]] # type: ignore
+ except KeyError:
+ _LOGGER.warning(
+ "Subtype value %s has no mapping, use base class %s.",
+ subtype_value,
+ cls.__name__,
+ )
+ break
+ else:
+ _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__)
+ break
+ return cls
+
+ @classmethod
+ def _get_rest_key_parts(cls, attr_key):
+ """Get the RestAPI key of this attr, split it and decode part
+ :param str attr_key: Attribute key must be in attribute_map.
+ :returns: A list of RestAPI part
+ :rtype: list
+ """
+ rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"])
+ return [_decode_attribute_map_key(key_part) for key_part in rest_split_key]
+
+
+def _decode_attribute_map_key(key):
+ """This decode a key in an _attribute_map to the actual key we want to look at
+ inside the received data.
+
+ :param str key: A key string from the generated code
+ :returns: The decoded key
+ :rtype: str
+ """
+ return key.replace("\\.", ".")
+
+
+class Serializer: # pylint: disable=too-many-public-methods
+ """Request object model serializer."""
+
+ basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
+
+ _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()}
+ days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"}
+ months = {
+ 1: "Jan",
+ 2: "Feb",
+ 3: "Mar",
+ 4: "Apr",
+ 5: "May",
+ 6: "Jun",
+ 7: "Jul",
+ 8: "Aug",
+ 9: "Sep",
+ 10: "Oct",
+ 11: "Nov",
+ 12: "Dec",
+ }
+ validation = {
+ "min_length": lambda x, y: len(x) < y,
+ "max_length": lambda x, y: len(x) > y,
+ "minimum": lambda x, y: x < y,
+ "maximum": lambda x, y: x > y,
+ "minimum_ex": lambda x, y: x <= y,
+ "maximum_ex": lambda x, y: x >= y,
+ "min_items": lambda x, y: len(x) < y,
+ "max_items": lambda x, y: len(x) > y,
+ "pattern": lambda x, y: not re.match(y, x, re.UNICODE),
+ "unique": lambda x, y: len(x) != len(set(x)),
+ "multiple": lambda x, y: x % y != 0,
+ }
+
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
+ self.serialize_type = {
+ "iso-8601": Serializer.serialize_iso,
+ "rfc-1123": Serializer.serialize_rfc,
+ "unix-time": Serializer.serialize_unix,
+ "duration": Serializer.serialize_duration,
+ "date": Serializer.serialize_date,
+ "time": Serializer.serialize_time,
+ "decimal": Serializer.serialize_decimal,
+ "long": Serializer.serialize_long,
+ "bytearray": Serializer.serialize_bytearray,
+ "base64": Serializer.serialize_base64,
+ "object": self.serialize_object,
+ "[]": self.serialize_iter,
+ "{}": self.serialize_dict,
+ }
+ self.dependencies: Dict[str, type] = dict(classes) if classes else {}
+ self.key_transformer = full_restapi_key_transformer
+ self.client_side_validation = True
+
+ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals
+ self, target_obj, data_type=None, **kwargs
+ ):
+ """Serialize data into a string according to type.
+
+ :param object target_obj: The data to be serialized.
+ :param str data_type: The type to be serialized from.
+ :rtype: str, dict
+ :raises SerializationError: if serialization fails.
+ :returns: The serialized data.
+ """
+ key_transformer = kwargs.get("key_transformer", self.key_transformer)
+ keep_readonly = kwargs.get("keep_readonly", False)
+ if target_obj is None:
+ return None
+
+ attr_name = None
+ class_name = target_obj.__class__.__name__
+
+ if data_type:
+ return self.serialize_data(target_obj, data_type, **kwargs)
+
+ if not hasattr(target_obj, "_attribute_map"):
+ data_type = type(target_obj).__name__
+ if data_type in self.basic_types.values():
+ return self.serialize_data(target_obj, data_type, **kwargs)
+
+ # Force "is_xml" kwargs if we detect a XML model
+ try:
+ is_xml_model_serialization = kwargs["is_xml"]
+ except KeyError:
+ is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model())
+
+ serialized = {}
+ if is_xml_model_serialization:
+ serialized = target_obj._create_xml_node() # pylint: disable=protected-access
+ try:
+ attributes = target_obj._attribute_map # pylint: disable=protected-access
+ for attr, attr_desc in attributes.items():
+ attr_name = attr
+ if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access
+ attr_name, {}
+ ).get("readonly", False):
+ continue
+
+ if attr_name == "additional_properties" and attr_desc["key"] == "":
+ if target_obj.additional_properties is not None:
+ serialized.update(target_obj.additional_properties)
+ continue
+ try:
+
+ orig_attr = getattr(target_obj, attr)
+ if is_xml_model_serialization:
+ pass # Don't provide "transformer" for XML for now. Keep "orig_attr"
+ else: # JSON
+ keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr)
+ keys = keys if isinstance(keys, list) else [keys]
+
+ kwargs["serialization_ctxt"] = attr_desc
+ new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs)
+
+ if is_xml_model_serialization:
+ xml_desc = attr_desc.get("xml", {})
+ xml_name = xml_desc.get("name", attr_desc["key"])
+ xml_prefix = xml_desc.get("prefix", None)
+ xml_ns = xml_desc.get("ns", None)
+ if xml_desc.get("attr", False):
+ if xml_ns:
+ ET.register_namespace(xml_prefix, xml_ns)
+ xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+ serialized.set(xml_name, new_attr) # type: ignore
+ continue
+ if xml_desc.get("text", False):
+ serialized.text = new_attr # type: ignore
+ continue
+ if isinstance(new_attr, list):
+ serialized.extend(new_attr) # type: ignore
+ elif isinstance(new_attr, ET.Element):
+ # If the down XML has no XML/Name,
+ # we MUST replace the tag with the local tag. But keeping the namespaces.
+ if "name" not in getattr(orig_attr, "_xml_map", {}):
+ splitted_tag = new_attr.tag.split("}")
+ if len(splitted_tag) == 2: # Namespace
+ new_attr.tag = "}".join([splitted_tag[0], xml_name])
+ else:
+ new_attr.tag = xml_name
+ serialized.append(new_attr) # type: ignore
+ else: # That's a basic type
+ # Integrate namespace if necessary
+ local_node = _create_xml_node(xml_name, xml_prefix, xml_ns)
+ local_node.text = str(new_attr)
+ serialized.append(local_node) # type: ignore
+ else: # JSON
+ for k in reversed(keys): # type: ignore
+ new_attr = {k: new_attr}
+
+ _new_attr = new_attr
+ _serialized = serialized
+ for k in keys: # type: ignore
+ if k not in _serialized:
+ _serialized.update(_new_attr) # type: ignore
+ _new_attr = _new_attr[k] # type: ignore
+ _serialized = _serialized[k]
+ except ValueError as err:
+ if isinstance(err, SerializationError):
+ raise
+
+ except (AttributeError, KeyError, TypeError) as err:
+ msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj))
+ raise SerializationError(msg) from err
+ return serialized
+
+ def body(self, data, data_type, **kwargs):
+ """Serialize data intended for a request body.
+
+ :param object data: The data to be serialized.
+ :param str data_type: The type to be serialized from.
+ :rtype: dict
+ :raises SerializationError: if serialization fails.
+ :raises ValueError: if data is None
+ :returns: The serialized request body
+ """
+
+ # Just in case this is a dict
+ internal_data_type_str = data_type.strip("[]{}")
+ internal_data_type = self.dependencies.get(internal_data_type_str, None)
+ try:
+ is_xml_model_serialization = kwargs["is_xml"]
+ except KeyError:
+ if internal_data_type and issubclass(internal_data_type, Model):
+ is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model())
+ else:
+ is_xml_model_serialization = False
+ if internal_data_type and not isinstance(internal_data_type, Enum):
+ try:
+ deserializer = Deserializer(self.dependencies)
+ # Since it's on serialization, it's almost sure that format is not JSON REST
+ # We're not able to deal with additional properties for now.
+ deserializer.additional_properties_detection = False
+ if is_xml_model_serialization:
+ deserializer.key_extractors = [ # type: ignore
+ attribute_key_case_insensitive_extractor,
+ ]
+ else:
+ deserializer.key_extractors = [
+ rest_key_case_insensitive_extractor,
+ attribute_key_case_insensitive_extractor,
+ last_rest_key_case_insensitive_extractor,
+ ]
+ data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access
+ except DeserializationError as err:
+ raise SerializationError("Unable to build a model: " + str(err)) from err
+
+ return self._serialize(data, data_type, **kwargs)
+
+ def url(self, name, data, data_type, **kwargs):
+ """Serialize data intended for a URL path.
+
+ :param str name: The name of the URL path parameter.
+ :param object data: The data to be serialized.
+ :param str data_type: The type to be serialized from.
+ :rtype: str
+ :returns: The serialized URL path
+ :raises TypeError: if serialization fails.
+ :raises ValueError: if data is None
+ """
+ try:
+ output = self.serialize_data(data, data_type, **kwargs)
+ if data_type == "bool":
+ output = json.dumps(output)
+
+ if kwargs.get("skip_quote") is True:
+ output = str(output)
+ output = output.replace("{", quote("{")).replace("}", quote("}"))
+ else:
+ output = quote(str(output), safe="")
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return output
+
+ def query(self, name, data, data_type, **kwargs):
+ """Serialize data intended for a URL query.
+
+ :param str name: The name of the query parameter.
+ :param object data: The data to be serialized.
+ :param str data_type: The type to be serialized from.
+ :rtype: str, list
+ :raises TypeError: if serialization fails.
+ :raises ValueError: if data is None
+ :returns: The serialized query parameter
+ """
+ try:
+ # Treat the list aside, since we don't want to encode the div separator
+ if data_type.startswith("["):
+ internal_data_type = data_type[1:-1]
+ do_quote = not kwargs.get("skip_quote", False)
+ return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs)
+
+ # Not a list, regular serialization
+ output = self.serialize_data(data, data_type, **kwargs)
+ if data_type == "bool":
+ output = json.dumps(output)
+ if kwargs.get("skip_quote") is True:
+ output = str(output)
+ else:
+ output = quote(str(output), safe="")
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
+
+ def header(self, name, data, data_type, **kwargs):
+ """Serialize data intended for a request header.
+
+ :param str name: The name of the header.
+ :param object data: The data to be serialized.
+ :param str data_type: The type to be serialized from.
+ :rtype: str
+ :raises TypeError: if serialization fails.
+ :raises ValueError: if data is None
+ :returns: The serialized header
+ """
+ try:
+ if data_type in ["[str]"]:
+ data = ["" if d is None else d for d in data]
+
+ output = self.serialize_data(data, data_type, **kwargs)
+ if data_type == "bool":
+ output = json.dumps(output)
+ except SerializationError as exc:
+ raise TypeError("{} must be type {}.".format(name, data_type)) from exc
+ return str(output)
+
+ def serialize_data(self, data, data_type, **kwargs):
+ """Serialize generic data according to supplied data type.
+
+ :param object data: The data to be serialized.
+ :param str data_type: The type to be serialized from.
+ :raises AttributeError: if required data is None.
+ :raises ValueError: if data is None
+ :raises SerializationError: if serialization fails.
+ :returns: The serialized data.
+ :rtype: str, int, float, bool, dict, list
+ """
+ if data is None:
+ raise ValueError("No value for given attribute")
+
+ try:
+ if data is CoreNull:
+ return None
+ if data_type in self.basic_types.values():
+ return self.serialize_basic(data, data_type, **kwargs)
+
+ if data_type in self.serialize_type:
+ return self.serialize_type[data_type](data, **kwargs)
+
+ # If dependencies is empty, try with current data class
+ # It has to be a subclass of Enum anyway
+ enum_type = self.dependencies.get(data_type, data.__class__)
+ if issubclass(enum_type, Enum):
+ return Serializer.serialize_enum(data, enum_obj=enum_type)
+
+ iter_type = data_type[0] + data_type[-1]
+ if iter_type in self.serialize_type:
+ return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs)
+
+ except (ValueError, TypeError) as err:
+ msg = "Unable to serialize value: {!r} as type: {!r}."
+ raise SerializationError(msg.format(data, data_type)) from err
+ return self._serialize(data, **kwargs)
+
+ @classmethod
+ def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements
+ custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
+ if custom_serializer:
+ return custom_serializer
+ if kwargs.get("is_xml", False):
+ return cls._xml_basic_types_serializers.get(data_type)
+
+ @classmethod
+ def serialize_basic(cls, data, data_type, **kwargs):
+ """Serialize basic builting data type.
+ Serializes objects to str, int, float or bool.
+
+ Possible kwargs:
+ - basic_types_serializers dict[str, callable] : If set, use the callable as serializer
+ - is_xml bool : If set, use xml_basic_types_serializers
+
+ :param obj data: Object to be serialized.
+ :param str data_type: Type of object in the iterable.
+ :rtype: str, int, float, bool
+ :return: serialized object
+ """
+ custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
+ if custom_serializer:
+ return custom_serializer(data)
+ if data_type == "str":
+ return cls.serialize_unicode(data)
+ return eval(data_type)(data) # nosec # pylint: disable=eval-used
+
+ @classmethod
+ def serialize_unicode(cls, data):
+ """Special handling for serializing unicode strings in Py2.
+ Encode to UTF-8 if unicode, otherwise handle as a str.
+
+ :param str data: Object to be serialized.
+ :rtype: str
+ :return: serialized object
+ """
+ try: # If I received an enum, return its value
+ return data.value
+ except AttributeError:
+ pass
+
+ try:
+ if isinstance(data, unicode): # type: ignore
+ # Don't change it, JSON and XML ElementTree are totally able
+ # to serialize correctly u'' strings
+ return data
+ except NameError:
+ return str(data)
+ return str(data)
+
+ def serialize_iter(self, data, iter_type, div=None, **kwargs):
+ """Serialize iterable.
+
+ Supported kwargs:
+ - serialization_ctxt dict : The current entry of _attribute_map, or same format.
+ serialization_ctxt['type'] should be same as data_type.
+ - is_xml bool : If set, serialize as XML
+
+ :param list data: Object to be serialized.
+ :param str iter_type: Type of object in the iterable.
+ :param str div: If set, this str will be used to combine the elements
+ in the iterable into a single combined string, joined by div.
+ Default is None.
+ :rtype: list, str
+ :return: serialized iterable
+ """
+ if isinstance(data, str):
+ raise SerializationError("Refuse str type as a valid iter type.")
+
+ serialization_ctxt = kwargs.get("serialization_ctxt", {})
+ is_xml = kwargs.get("is_xml", False)
+
+ serialized = []
+ for d in data:
+ try:
+ serialized.append(self.serialize_data(d, iter_type, **kwargs))
+ except ValueError as err:
+ if isinstance(err, SerializationError):
+ raise
+ serialized.append(None)
+
+ if kwargs.get("do_quote", False):
+ serialized = ["" if s is None else quote(str(s), safe="") for s in serialized]
+
+ if div:
+ serialized = ["" if s is None else str(s) for s in serialized]
+ serialized = div.join(serialized)
+
+ if "xml" in serialization_ctxt or is_xml:
+ # XML serialization is more complicated
+ xml_desc = serialization_ctxt.get("xml", {})
+ xml_name = xml_desc.get("name")
+ if not xml_name:
+ xml_name = serialization_ctxt["key"]
+
+ # Create a wrap node if necessary (use the fact that Element and list have "append")
+ is_wrapped = xml_desc.get("wrapped", False)
+ node_name = xml_desc.get("itemsName", xml_name)
+ if is_wrapped:
+ final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
+ else:
+ final_result = []
+ # All list elements to "local_node"
+ for el in serialized:
+ if isinstance(el, ET.Element):
+ el_node = el
+ else:
+ el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
+ if el is not None: # Otherwise it writes "None" :-p
+ el_node.text = str(el)
+ final_result.append(el_node)
+ return final_result
+ return serialized
+
+ def serialize_dict(self, attr, dict_type, **kwargs):
+ """Serialize a dictionary of objects.
+
+ :param dict attr: Object to be serialized.
+ :param str dict_type: Type of object in the dictionary.
+ :rtype: dict
+ :return: serialized dictionary
+ """
+ serialization_ctxt = kwargs.get("serialization_ctxt", {})
+ serialized = {}
+ for key, value in attr.items():
+ try:
+ serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs)
+ except ValueError as err:
+ if isinstance(err, SerializationError):
+ raise
+ serialized[self.serialize_unicode(key)] = None
+
+ if "xml" in serialization_ctxt:
+ # XML serialization is more complicated
+ xml_desc = serialization_ctxt["xml"]
+ xml_name = xml_desc["name"]
+
+ final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None))
+ for key, value in serialized.items():
+ ET.SubElement(final_result, key).text = value
+ return final_result
+
+ return serialized
+
+ def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
+ """Serialize a generic object.
+ This will be handled as a dictionary. If object passed in is not
+ a basic type (str, int, float, dict, list) it will simply be
+ cast to str.
+
+ :param dict attr: Object to be serialized.
+ :rtype: dict or str
+ :return: serialized object
+ """
+ if attr is None:
+ return None
+ if isinstance(attr, ET.Element):
+ return attr
+ obj_type = type(attr)
+ if obj_type in self.basic_types:
+ return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs)
+ if obj_type is _long_type:
+ return self.serialize_long(attr)
+ if obj_type is str:
+ return self.serialize_unicode(attr)
+ if obj_type is datetime.datetime:
+ return self.serialize_iso(attr)
+ if obj_type is datetime.date:
+ return self.serialize_date(attr)
+ if obj_type is datetime.time:
+ return self.serialize_time(attr)
+ if obj_type is datetime.timedelta:
+ return self.serialize_duration(attr)
+ if obj_type is decimal.Decimal:
+ return self.serialize_decimal(attr)
+
+ # If it's a model or I know this dependency, serialize as a Model
+ if obj_type in self.dependencies.values() or isinstance(attr, Model):
+ return self._serialize(attr)
+
+ if obj_type == dict:
+ serialized = {}
+ for key, value in attr.items():
+ try:
+ serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs)
+ except ValueError:
+ serialized[self.serialize_unicode(key)] = None
+ return serialized
+
+ if obj_type == list:
+ serialized = []
+ for obj in attr:
+ try:
+ serialized.append(self.serialize_object(obj, **kwargs))
+ except ValueError:
+ pass
+ return serialized
+ return str(attr)
+
+ @staticmethod
+ def serialize_enum(attr, enum_obj=None):
+ try:
+ result = attr.value
+ except AttributeError:
+ result = attr
+ try:
+ enum_obj(result) # type: ignore
+ return result
+ except ValueError as exc:
+ for enum_value in enum_obj: # type: ignore
+ if enum_value.value.lower() == str(attr).lower():
+ return enum_value.value
+ error = "{!r} is not valid value for enum {!r}"
+ raise SerializationError(error.format(attr, enum_obj)) from exc
+
+ @staticmethod
+ def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize bytearray into base-64 string.
+
+ :param str attr: Object to be serialized.
+ :rtype: str
+ :return: serialized base64
+ """
+ return b64encode(attr).decode()
+
+ @staticmethod
+ def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize str into base-64 string.
+
+ :param str attr: Object to be serialized.
+ :rtype: str
+ :return: serialized base64
+ """
+ encoded = b64encode(attr).decode("ascii")
+ return encoded.strip("=").replace("+", "-").replace("/", "_")
+
+ @staticmethod
+ def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize Decimal object to float.
+
+ :param decimal attr: Object to be serialized.
+ :rtype: float
+ :return: serialized decimal
+ """
+ return float(attr)
+
+ @staticmethod
+ def serialize_long(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize long (Py2) or int (Py3).
+
+ :param int attr: Object to be serialized.
+ :rtype: int/long
+ :return: serialized long
+ """
+ return _long_type(attr)
+
+ @staticmethod
+ def serialize_date(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize Date object into ISO-8601 formatted string.
+
+ :param Date attr: Object to be serialized.
+ :rtype: str
+ :return: serialized date
+ """
+ if isinstance(attr, str):
+ attr = isodate.parse_date(attr)
+ t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day)
+ return t
+
+ @staticmethod
+ def serialize_time(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize Time object into ISO-8601 formatted string.
+
+ :param datetime.time attr: Object to be serialized.
+ :rtype: str
+ :return: serialized time
+ """
+ if isinstance(attr, str):
+ attr = isodate.parse_time(attr)
+ t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second)
+ if attr.microsecond:
+ t += ".{:02}".format(attr.microsecond)
+ return t
+
+ @staticmethod
+ def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize TimeDelta object into ISO-8601 formatted string.
+
+ :param TimeDelta attr: Object to be serialized.
+ :rtype: str
+ :return: serialized duration
+ """
+ if isinstance(attr, str):
+ attr = isodate.parse_duration(attr)
+ return isodate.duration_isoformat(attr)
+
+ @staticmethod
+ def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize Datetime object into RFC-1123 formatted string.
+
+ :param Datetime attr: Object to be serialized.
+ :rtype: str
+ :raises TypeError: if format invalid.
+ :return: serialized rfc
+ """
+ try:
+ if not attr.tzinfo:
+ _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+ utc = attr.utctimetuple()
+ except AttributeError as exc:
+ raise TypeError("RFC1123 object must be valid Datetime object.") from exc
+
+ return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
+ Serializer.days[utc.tm_wday],
+ utc.tm_mday,
+ Serializer.months[utc.tm_mon],
+ utc.tm_year,
+ utc.tm_hour,
+ utc.tm_min,
+ utc.tm_sec,
+ )
+
+ @staticmethod
+ def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize Datetime object into ISO-8601 formatted string.
+
+ :param Datetime attr: Object to be serialized.
+ :rtype: str
+ :raises SerializationError: if format invalid.
+ :return: serialized iso
+ """
+ if isinstance(attr, str):
+ attr = isodate.parse_datetime(attr)
+ try:
+ if not attr.tzinfo:
+ _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+ utc = attr.utctimetuple()
+ if utc.tm_year > 9999 or utc.tm_year < 1:
+ raise OverflowError("Hit max or min date")
+
+ microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0")
+ if microseconds:
+ microseconds = "." + microseconds
+ date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
+ utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec
+ )
+ return date + microseconds + "Z"
+ except (ValueError, OverflowError) as err:
+ msg = "Unable to serialize datetime object."
+ raise SerializationError(msg) from err
+ except AttributeError as err:
+ msg = "ISO-8601 object must be valid Datetime object."
+ raise TypeError(msg) from err
+
+ @staticmethod
+ def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument
+ """Serialize Datetime object into IntTime format.
+ This is represented as seconds.
+
+ :param Datetime attr: Object to be serialized.
+ :rtype: int
+ :raises SerializationError: if format invalid
+ :return: serialized unix
+ """
+ if isinstance(attr, int):
+ return attr
+ try:
+ if not attr.tzinfo:
+ _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+ return int(calendar.timegm(attr.utctimetuple()))
+ except AttributeError as exc:
+ raise TypeError("Unix time object must be valid Datetime object.") from exc
+
+
+def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
+ key = attr_desc["key"]
+ working_data = data
+
+ while "." in key:
+ # Need the cast, as for some reason "split" is typed as list[str | Any]
+ dict_keys = cast(List[str], _FLATTEN.split(key))
+ if len(dict_keys) == 1:
+ key = _decode_attribute_map_key(dict_keys[0])
+ break
+ working_key = _decode_attribute_map_key(dict_keys[0])
+ working_data = working_data.get(working_key, data)
+ if working_data is None:
+ # If at any point while following the flattened JSON path we see None, it means
+ # that all properties under it are None as well
+ return None
+ key = ".".join(dict_keys[1:])
+
+ return working_data.get(key)
+
+
+def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements
+ attr, attr_desc, data
+):
+ key = attr_desc["key"]
+ working_data = data
+
+ while "." in key:
+ dict_keys = _FLATTEN.split(key)
+ if len(dict_keys) == 1:
+ key = _decode_attribute_map_key(dict_keys[0])
+ break
+ working_key = _decode_attribute_map_key(dict_keys[0])
+ working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data)
+ if working_data is None:
+ # If at any point while following the flattened JSON path we see None, it means
+ # that all properties under it are None as well
+ return None
+ key = ".".join(dict_keys[1:])
+
+ if working_data:
+ return attribute_key_case_insensitive_extractor(key, None, working_data)
+
+
+def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
+ """Extract the attribute in "data" based on the last part of the JSON path key.
+
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
+ """
+ key = attr_desc["key"]
+ dict_keys = _FLATTEN.split(key)
+ return attribute_key_extractor(dict_keys[-1], None, data)
+
+
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument
+ """Extract the attribute in "data" based on the last part of the JSON path key.
+
+ This is the case insensitive version of "last_rest_key_extractor"
+ :param str attr: The attribute to extract
+ :param dict attr_desc: The attribute description
+ :param dict data: The data to extract from
+ :rtype: object
+ :returns: The extracted attribute
+ """
+ key = attr_desc["key"]
+ dict_keys = _FLATTEN.split(key)
+ return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data)
+
+
+def attribute_key_extractor(attr, _, data):
+ return data.get(attr)
+
+
+def attribute_key_case_insensitive_extractor(attr, _, data):
+ found_key = None
+ lower_attr = attr.lower()
+ for key in data:
+ if lower_attr == key.lower():
+ found_key = key
+ break
+
+ return data.get(found_key)
+
+
+def _extract_name_from_internal_type(internal_type):
+ """Given an internal type XML description, extract correct XML name with namespace.
+
+ :param dict internal_type: A model type
+ :rtype: tuple
+ :returns: A tuple XML name + namespace dict
+ """
+ internal_type_xml_map = getattr(internal_type, "_xml_map", {})
+ xml_name = internal_type_xml_map.get("name", internal_type.__name__)
+ xml_ns = internal_type_xml_map.get("ns", None)
+ if xml_ns:
+ xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+ return xml_name
+
+
+def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements
+ if isinstance(data, dict):
+ return None
+
+ # Test if this model is XML ready first
+ if not isinstance(data, ET.Element):
+ return None
+
+ xml_desc = attr_desc.get("xml", {})
+ xml_name = xml_desc.get("name", attr_desc["key"])
+
+ # Look for children
+ is_iter_type = attr_desc["type"].startswith("[")
+ is_wrapped = xml_desc.get("wrapped", False)
+ internal_type = attr_desc.get("internalType", None)
+ internal_type_xml_map = getattr(internal_type, "_xml_map", {})
+
+ # Integrate namespace if necessary
+ xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None))
+ if xml_ns:
+ xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+
+ # If it's an attribute, that's simple
+ if xml_desc.get("attr", False):
+ return data.get(xml_name)
+
+ # If it's x-ms-text, that's simple too
+ if xml_desc.get("text", False):
+ return data.text
+
+ # Scenario where I take the local name:
+ # - Wrapped node
+ # - Internal type is an enum (considered basic types)
+ # - Internal type has no XML/Name node
+ if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)):
+ children = data.findall(xml_name)
+ # If internal type has a local name and it's not a list, I use that name
+ elif not is_iter_type and internal_type and "name" in internal_type_xml_map:
+ xml_name = _extract_name_from_internal_type(internal_type)
+ children = data.findall(xml_name)
+ # That's an array
+ else:
+ if internal_type: # Complex type, ignore itemsName and use the complex type name
+ items_name = _extract_name_from_internal_type(internal_type)
+ else:
+ items_name = xml_desc.get("itemsName", xml_name)
+ children = data.findall(items_name)
+
+ if len(children) == 0:
+ if is_iter_type:
+ if is_wrapped:
+ return None # is_wrapped no node, we want None
+ return [] # not wrapped, assume empty list
+ return None # Assume it's not there, maybe an optional node.
+
+ # If is_iter_type and not wrapped, return all found children
+ if is_iter_type:
+ if not is_wrapped:
+ return children
+ # Iter and wrapped, should have found one node only (the wrap one)
+ if len(children) != 1:
+ raise DeserializationError(
+ "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long
+ xml_name
+ )
+ )
+ return list(children[0]) # Might be empty list and that's ok.
+
+ # Here it's not an iter type; we should have found one element only, or none
+ if len(children) > 1:
+ raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name))
+ return children[0]
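
For XML payloads, xml_key_extractor walks an ElementTree node instead of a dict. A rough sketch of the two simplest cases, child element and XML attribute, using hand-written attr_desc dicts that only mimic what the generated models declare:

import xml.etree.ElementTree as ET
from azure.storage.fileshare._generated._serialization import xml_key_extractor

node = ET.fromstring('<Share Version="2"><Name>logs</Name></Share>')

# Child element: the extractor returns the Element itself; deserialize_data() reads its text later.
print(xml_key_extractor("name", {"key": "Name", "type": "str", "xml": {"name": "Name"}}, node))

# XML attribute: with "attr": True the string value is returned directly.
print(xml_key_extractor("version", {"key": "Version", "type": "str", "xml": {"name": "Version", "attr": True}}, node))
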
+
+
+class Deserializer:
+ """Response object model deserializer.
+
+ :param dict classes: Class type dictionary for deserializing complex types.
+ :ivar list key_extractors: Ordered list of extractors to be used by this deserializer.
+ """
+
+ basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
+
+ valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+
+ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None:
+ self.deserialize_type = {
+ "iso-8601": Deserializer.deserialize_iso,
+ "rfc-1123": Deserializer.deserialize_rfc,
+ "unix-time": Deserializer.deserialize_unix,
+ "duration": Deserializer.deserialize_duration,
+ "date": Deserializer.deserialize_date,
+ "time": Deserializer.deserialize_time,
+ "decimal": Deserializer.deserialize_decimal,
+ "long": Deserializer.deserialize_long,
+ "bytearray": Deserializer.deserialize_bytearray,
+ "base64": Deserializer.deserialize_base64,
+ "object": self.deserialize_object,
+ "[]": self.deserialize_iter,
+ "{}": self.deserialize_dict,
+ }
+ self.deserialize_expected_types = {
+ "duration": (isodate.Duration, datetime.timedelta),
+ "iso-8601": (datetime.datetime),
+ }
+ self.dependencies: Dict[str, type] = dict(classes) if classes else {}
+ self.key_extractors = [rest_key_extractor, xml_key_extractor]
+ # Additional properties only work if "rest_key_extractor" is used to
+ # extract the keys. Making it work regardless of the key extractor would be too
+ # complicated, with no real scenario for now.
+ # So we add a flag to disable additional properties detection. This flag should be
+ # used if you expect the deserialization NOT to come from JSON REST syntax.
+ # Otherwise, results are unexpected.
+ self.additional_properties_detection = True
+
+ def __call__(self, target_obj, response_data, content_type=None):
+ """Call the deserializer to process a REST response.
+
+ :param str target_obj: Target data type to deserialize to.
+ :param requests.Response response_data: REST response object.
+ :param str content_type: Swagger "produces" if available.
+ :raises DeserializationError: if deserialization fails.
+ :return: Deserialized object.
+ :rtype: object
+ """
+ data = self._unpack_content(response_data, content_type)
+ return self._deserialize(target_obj, data)
+
+ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements
+ """Call the deserializer on a model.
+
+ Data needs to be already deserialized as JSON or XML ElementTree
+
+ :param str target_obj: Target data type to deserialize to.
+ :param object data: Object to deserialize.
+ :raises DeserializationError: if deserialization fails.
+ :return: Deserialized object.
+ :rtype: object
+ """
+ # This is already a model, go recursive just in case
+ if hasattr(data, "_attribute_map"):
+ constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")]
+ try:
+ for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access
+ if attr in constants:
+ continue
+ value = getattr(data, attr)
+ if value is None:
+ continue
+ local_type = mapconfig["type"]
+ internal_data_type = local_type.strip("[]{}")
+ if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum):
+ continue
+ setattr(data, attr, self._deserialize(local_type, value))
+ return data
+ except AttributeError:
+ return
+
+ response, class_name = self._classify_target(target_obj, data)
+
+ if isinstance(response, str):
+ return self.deserialize_data(data, response)
+ if isinstance(response, type) and issubclass(response, Enum):
+ return self.deserialize_enum(data, response)
+
+ if data is None or data is CoreNull:
+ return data
+ try:
+ attributes = response._attribute_map # type: ignore # pylint: disable=protected-access
+ d_attrs = {}
+ for attr, attr_desc in attributes.items():
+ # Check empty string. If it's not empty, someone has a real "additionalProperties"...
+ if attr == "additional_properties" and attr_desc["key"] == "":
+ continue
+ raw_value = None
+ # Enhance attr_desc with some dynamic data
+ attr_desc = attr_desc.copy() # Do a copy, do not change the real one
+ internal_data_type = attr_desc["type"].strip("[]{}")
+ if internal_data_type in self.dependencies:
+ attr_desc["internalType"] = self.dependencies[internal_data_type]
+
+ for key_extractor in self.key_extractors:
+ found_value = key_extractor(attr, attr_desc, data)
+ if found_value is not None:
+ if raw_value is not None and raw_value != found_value:
+ msg = (
+ "Ignoring extracted value '%s' from %s for key '%s'"
+ " (duplicate extraction, follow extractors order)"
+ )
+ _LOGGER.warning(msg, found_value, key_extractor, attr)
+ continue
+ raw_value = found_value
+
+ value = self.deserialize_data(raw_value, attr_desc["type"])
+ d_attrs[attr] = value
+ except (AttributeError, TypeError, KeyError) as err:
+ msg = "Unable to deserialize to object: " + class_name # type: ignore
+ raise DeserializationError(msg) from err
+ additional_properties = self._build_additional_properties(attributes, data)
+ return self._instantiate_model(response, d_attrs, additional_properties)
+
+ def _build_additional_properties(self, attribute_map, data):
+ if not self.additional_properties_detection:
+ return None
+ if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "":
+ # Check empty string. If it's not empty, someone has a real "additionalProperties"
+ return None
+ if isinstance(data, ET.Element):
+ data = {el.tag: el.text for el in data}
+
+ known_keys = {
+ _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0])
+ for desc in attribute_map.values()
+ if desc["key"] != ""
+ }
+ present_keys = set(data.keys())
+ missing_keys = present_keys - known_keys
+ return {key: data[key] for key in missing_keys}
+
+ def _classify_target(self, target, data):
+ """Check to see whether the deserialization target object can
+ be classified into a subclass.
+ Once classification has been determined, initialize object.
+
+ :param str target: The target object type to deserialize to.
+ :param str/dict data: The response data to deserialize.
+ :return: The classified target object and its class name.
+ :rtype: tuple
+ """
+ if target is None:
+ return None, None
+
+ if isinstance(target, str):
+ try:
+ target = self.dependencies[target]
+ except KeyError:
+ return target, target
+
+ try:
+ target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access
+ except AttributeError:
+ pass # Target is not a Model, no classify
+ return target, target.__class__.__name__ # type: ignore
+
+ def failsafe_deserialize(self, target_obj, data, content_type=None):
+ """Ignores any errors encountered in deserialization,
+ and falls back to not deserializing the object. Recommended
+ for use in error deserialization, as we want to return the
+ HttpResponseError to users, and not have them deal with
+ a deserialization error.
+
+ :param str target_obj: The target object type to deserialize to.
+ :param str/dict data: The response data to deserialize.
+ :param str content_type: Swagger "produces" if available.
+ :return: Deserialized object.
+ :rtype: object
+ """
+ try:
+ return self(target_obj, data, content_type=content_type)
+ except: # pylint: disable=bare-except
+ _LOGGER.debug(
+ "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
+ )
+ return None
+
+ @staticmethod
+ def _unpack_content(raw_data, content_type=None):
+ """Extract the correct structure for deserialization.
+
+ If raw_data is a PipelineResponse, try to extract the result of RawDeserializer.
+ if we can't, raise. Your Pipeline should have a RawDeserializer.
+
+ If not a pipeline response and raw_data is bytes or string, use content-type
+ to decode it. If no content-type, try JSON.
+
+ If raw_data is something else, bypass all logic and return it directly.
+
+ :param obj raw_data: Data to be processed.
+ :param str content_type: How to parse if raw_data is a string/bytes.
+ :raises JSONDecodeError: If JSON is requested and parsing is impossible.
+ :raises UnicodeDecodeError: If bytes is not UTF8
+ :rtype: object
+ :return: Unpacked content.
+ """
+ # Assume this is enough to detect a Pipeline Response without importing it
+ context = getattr(raw_data, "context", {})
+ if context:
+ if RawDeserializer.CONTEXT_NAME in context:
+ return context[RawDeserializer.CONTEXT_NAME]
+ raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize")
+
+ # Assume this is enough to recognize universal_http.ClientResponse without importing it
+ if hasattr(raw_data, "body"):
+ return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers)
+
+ # Assume this is enough to recognize requests.Response without importing it.
+ if hasattr(raw_data, "_content_consumed"):
+ return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers)
+
+ if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"):
+ return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore
+ return raw_data
+
+ def _instantiate_model(self, response, attrs, additional_properties=None):
+ """Instantiate a response model passing in deserialized args.
+
+ :param Response response: The response model class.
+ :param dict attrs: The deserialized response attributes.
+ :param dict additional_properties: Additional properties to be set.
+ :rtype: Response
+ :return: The instantiated response model.
+ """
+ if callable(response):
+ subtype = getattr(response, "_subtype_map", {})
+ try:
+ readonly = [
+ k
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
+ if v.get("readonly")
+ ]
+ const = [
+ k
+ for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore
+ if v.get("constant")
+ ]
+ kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const}
+ response_obj = response(**kwargs)
+ for attr in readonly:
+ setattr(response_obj, attr, attrs.get(attr))
+ if additional_properties:
+ response_obj.additional_properties = additional_properties # type: ignore
+ return response_obj
+ except TypeError as err:
+ msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore
+ raise DeserializationError(msg + str(err)) from err
+ else:
+ try:
+ for attr, value in attrs.items():
+ setattr(response, attr, value)
+ return response
+ except Exception as exp:
+ msg = "Unable to populate response model. "
+ msg += "Type: {}, Error: {}".format(type(response), exp)
+ raise DeserializationError(msg) from exp
+
+ def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements
+ """Process data for deserialization according to data type.
+
+ :param str data: The response string to be deserialized.
+ :param str data_type: The type to deserialize to.
+ :raises DeserializationError: if deserialization fails.
+ :return: Deserialized object.
+ :rtype: object
+ """
+ if data is None:
+ return data
+
+ try:
+ if not data_type:
+ return data
+ if data_type in self.basic_types.values():
+ return self.deserialize_basic(data, data_type)
+ if data_type in self.deserialize_type:
+ if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())):
+ return data
+
+ is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment
+ "object",
+ "[]",
+ r"{}",
+ ]
+ if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text:
+ return None
+ data_val = self.deserialize_type[data_type](data)
+ return data_val
+
+ iter_type = data_type[0] + data_type[-1]
+ if iter_type in self.deserialize_type:
+ return self.deserialize_type[iter_type](data, data_type[1:-1])
+
+ obj_type = self.dependencies[data_type]
+ if issubclass(obj_type, Enum):
+ if isinstance(data, ET.Element):
+ data = data.text
+ return self.deserialize_enum(data, obj_type)
+
+ except (ValueError, TypeError, AttributeError) as err:
+ msg = "Unable to deserialize response data."
+ msg += " Data: {}, {}".format(data, data_type)
+ raise DeserializationError(msg) from err
+ return self._deserialize(obj_type, data)
+
+ def deserialize_iter(self, attr, iter_type):
+ """Deserialize an iterable.
+
+ :param list attr: Iterable to be deserialized.
+ :param str iter_type: The type of object in the iterable.
+ :return: Deserialized iterable.
+ :rtype: list
+ """
+ if attr is None:
+ return None
+ if isinstance(attr, ET.Element): # If I receive an element here, get the children
+ attr = list(attr)
+ if not isinstance(attr, (list, set)):
+ raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr)))
+ return [self.deserialize_data(a, iter_type) for a in attr]
+
+ def deserialize_dict(self, attr, dict_type):
+ """Deserialize a dictionary.
+
+ :param dict/list attr: Dictionary to be deserialized. Also accepts
+ a list of key, value pairs.
+ :param str dict_type: The object type of the items in the dictionary.
+ :return: Deserialized dictionary.
+ :rtype: dict
+ """
+ if isinstance(attr, list):
+ return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr}
+
+ if isinstance(attr, ET.Element):
+ # Transform <Key>value</Key> into {"Key": "value"}
+ attr = {el.tag: el.text for el in attr}
+ return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
+
+ def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements
+ """Deserialize a generic object.
+ This will be handled as a dictionary.
+
+ :param dict attr: Dictionary to be deserialized.
+ :return: Deserialized object.
+ :rtype: dict
+ :raises TypeError: if non-builtin datatype encountered.
+ """
+ if attr is None:
+ return None
+ if isinstance(attr, ET.Element):
+ # Do not recurse on XML, just return the tree as-is
+ return attr
+ if isinstance(attr, str):
+ return self.deserialize_basic(attr, "str")
+ obj_type = type(attr)
+ if obj_type in self.basic_types:
+ return self.deserialize_basic(attr, self.basic_types[obj_type])
+ if obj_type is _long_type:
+ return self.deserialize_long(attr)
+
+ if obj_type == dict:
+ deserialized = {}
+ for key, value in attr.items():
+ try:
+ deserialized[key] = self.deserialize_object(value, **kwargs)
+ except ValueError:
+ deserialized[key] = None
+ return deserialized
+
+ if obj_type == list:
+ deserialized = []
+ for obj in attr:
+ try:
+ deserialized.append(self.deserialize_object(obj, **kwargs))
+ except ValueError:
+ pass
+ return deserialized
+
+ error = "Cannot deserialize generic object with type: "
+ raise TypeError(error + str(obj_type))
+
+ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements
+ """Deserialize basic builtin data type from string.
+ Will attempt to convert to str, int, float and bool.
+ This function will also accept '1', '0', 'true' and 'false' as
+ valid bool values.
+
+ :param str attr: response string to be deserialized.
+ :param str data_type: deserialization data type.
+ :return: Deserialized basic type.
+ :rtype: str, int, float or bool
+ :raises TypeError: if string format is not valid.
+ """
+ # If we're here, data is supposed to be a basic type.
+ # If it's still an XML node, take the text
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ if not attr:
+ if data_type == "str":
+ # None or '', node <a/> is empty string.
+ return ""
+ # None or '', node <a/> with a strong type is None.
+ # Don't try to model "empty bool" or "empty int"
+ return None
+
+ if data_type == "bool":
+ if attr in [True, False, 1, 0]:
+ return bool(attr)
+ if isinstance(attr, str):
+ if attr.lower() in ["true", "1"]:
+ return True
+ if attr.lower() in ["false", "0"]:
+ return False
+ raise TypeError("Invalid boolean value: {}".format(attr))
+
+ if data_type == "str":
+ return self.deserialize_unicode(attr)
+ return eval(data_type)(attr) # nosec # pylint: disable=eval-used
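
As the docstring notes, deserialize_basic is permissive about booleans and maps empty XML nodes to "" for strings and None for everything else. A quick sketch (a Deserializer built with no model classes is enough for basic types):

from azure.storage.fileshare._generated._serialization import Deserializer

d = Deserializer()
print(d.deserialize_basic("true", "bool"))  # True
print(d.deserialize_basic("0", "bool"))     # False
print(d.deserialize_basic("42", "int"))     # 42
print(d.deserialize_basic("", "str"))       # ''
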
+
+ @staticmethod
+ def deserialize_unicode(data):
+ """Preserve unicode objects in Python 2, otherwise return data
+ as a string.
+
+ :param str data: response string to be deserialized.
+ :return: Deserialized string.
+ :rtype: str or unicode
+ """
+ # We might be here because we have an enum modeled as string,
+ # and we try to deserialize a partial dict with enum inside
+ if isinstance(data, Enum):
+ return data
+
+ # Consider this is real string
+ try:
+ if isinstance(data, unicode): # type: ignore
+ return data
+ except NameError:
+ return str(data)
+ return str(data)
+
+ @staticmethod
+ def deserialize_enum(data, enum_obj):
+ """Deserialize string into enum object.
+
+ If the string is not a valid enum value it will be returned as-is
+ and a warning will be logged.
+
+ :param str data: Response string to be deserialized. If this value is
+ None or invalid it will be returned as-is.
+ :param Enum enum_obj: Enum object to deserialize to.
+ :return: Deserialized enum object.
+ :rtype: Enum
+ """
+ if isinstance(data, enum_obj) or data is None:
+ return data
+ if isinstance(data, Enum):
+ data = data.value
+ if isinstance(data, int):
+ # Workaround. We might consider removing it in the future.
+ try:
+ return list(enum_obj.__members__.values())[data]
+ except IndexError as exc:
+ error = "{!r} is not a valid index for enum {!r}"
+ raise DeserializationError(error.format(data, enum_obj)) from exc
+ try:
+ return enum_obj(str(data))
+ except ValueError:
+ for enum_value in enum_obj:
+ if enum_value.value.lower() == str(data).lower():
+ return enum_value
+ # We don't fail anymore for unknown value, we deserialize as a string
+ _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj)
+ return Deserializer.deserialize_unicode(data)
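
deserialize_enum tries the exact value, then a case-insensitive match, and finally keeps the raw string rather than failing. A sketch with a hypothetical enum (the generated enums in models/_azure_file_storage_enums.py are the real targets; AccessTier below is made up for illustration):

import enum
from azure.storage.fileshare._generated._serialization import Deserializer

class AccessTier(str, enum.Enum):  # hypothetical enum for illustration
    HOT = "Hot"
    COOL = "Cool"

print(Deserializer.deserialize_enum("cool", AccessTier))     # AccessTier.COOL (case-insensitive match)
print(Deserializer.deserialize_enum("Premium", AccessTier))  # 'Premium' (unknown value kept, warning logged)
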
+
+ @staticmethod
+ def deserialize_bytearray(attr):
+ """Deserialize string into bytearray.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized bytearray
+ :rtype: bytearray
+ :raises TypeError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ return bytearray(b64decode(attr)) # type: ignore
+
+ @staticmethod
+ def deserialize_base64(attr):
+ """Deserialize base64 encoded string into string.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized base64 string
+ :rtype: bytearray
+ :raises TypeError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore
+ attr = attr + padding # type: ignore
+ encoded = attr.replace("-", "+").replace("_", "/")
+ return b64decode(encoded)
+
+ @staticmethod
+ def deserialize_decimal(attr):
+ """Deserialize string into Decimal object.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized decimal
+ :raises DeserializationError: if string format invalid.
+ :rtype: decimal
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ try:
+ return decimal.Decimal(str(attr)) # type: ignore
+ except decimal.DecimalException as err:
+ msg = "Invalid decimal {}".format(attr)
+ raise DeserializationError(msg) from err
+
+ @staticmethod
+ def deserialize_long(attr):
+ """Deserialize string into long (Py2) or int (Py3).
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized int
+ :rtype: long or int
+ :raises ValueError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ return _long_type(attr) # type: ignore
+
+ @staticmethod
+ def deserialize_duration(attr):
+ """Deserialize ISO-8601 formatted string into TimeDelta object.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized duration
+ :rtype: TimeDelta
+ :raises DeserializationError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ try:
+ duration = isodate.parse_duration(attr)
+ except (ValueError, OverflowError, AttributeError) as err:
+ msg = "Cannot deserialize duration object."
+ raise DeserializationError(msg) from err
+ return duration
+
+ @staticmethod
+ def deserialize_date(attr):
+ """Deserialize ISO-8601 formatted string into Date object.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized date
+ :rtype: Date
+ :raises DeserializationError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore
+ raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
+ # This must NOT use defaultmonth/defaultday. Using 0 ensures this raises an exception.
+ return isodate.parse_date(attr, defaultmonth=0, defaultday=0)
+
+ @staticmethod
+ def deserialize_time(attr):
+ """Deserialize ISO-8601 formatted string into time object.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized time
+ :rtype: datetime.time
+ :raises DeserializationError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore
+ raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
+ return isodate.parse_time(attr)
+
+ @staticmethod
+ def deserialize_rfc(attr):
+ """Deserialize RFC-1123 formatted string into Datetime object.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized RFC datetime
+ :rtype: Datetime
+ :raises DeserializationError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ try:
+ parsed_date = email.utils.parsedate_tz(attr) # type: ignore
+ date_obj = datetime.datetime(
+ *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60))
+ )
+ if not date_obj.tzinfo:
+ date_obj = date_obj.astimezone(tz=TZ_UTC)
+ except ValueError as err:
+ msg = "Cannot deserialize to rfc datetime object."
+ raise DeserializationError(msg) from err
+ return date_obj
+
+ @staticmethod
+ def deserialize_iso(attr):
+ """Deserialize ISO-8601 formatted string into Datetime object.
+
+ :param str attr: response string to be deserialized.
+ :return: Deserialized ISO datetime
+ :rtype: Datetime
+ :raises DeserializationError: if string format invalid.
+ """
+ if isinstance(attr, ET.Element):
+ attr = attr.text
+ try:
+ attr = attr.upper() # type: ignore
+ match = Deserializer.valid_date.match(attr)
+ if not match:
+ raise ValueError("Invalid datetime string: " + attr)
+
+ check_decimal = attr.split(".")
+ if len(check_decimal) > 1:
+ decimal_str = ""
+ for digit in check_decimal[1]:
+ if digit.isdigit():
+ decimal_str += digit
+ else:
+ break
+ if len(decimal_str) > 6:
+ attr = attr.replace(decimal_str, decimal_str[0:6])
+
+ date_obj = isodate.parse_datetime(attr)
+ test_utc = date_obj.utctimetuple()
+ if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
+ raise OverflowError("Hit max or min date")
+ except (ValueError, OverflowError, AttributeError) as err:
+ msg = "Cannot deserialize datetime object."
+ raise DeserializationError(msg) from err
+ return date_obj
+
+ @staticmethod
+ def deserialize_unix(attr):
+ """Deserialize a unix timestamp (IntTime format) into a Datetime object.
+ The timestamp is expressed in seconds.
+
+ :param int attr: Object to be deserialized.
+ :return: Deserialized datetime
+ :rtype: Datetime
+ :raises DeserializationError: if format invalid
+ """
+ if isinstance(attr, ET.Element):
+ attr = int(attr.text) # type: ignore
+ try:
+ attr = int(attr)
+ date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC)
+ except ValueError as err:
+ msg = "Cannot deserialize to unix datetime object."
+ raise DeserializationError(msg) from err
+ return date_obj
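
Completing the picture, the datetime deserializers accept the same three wire formats and all return timezone-aware datetimes. A minimal sketch, under the same private-import assumption as the earlier examples:

from azure.storage.fileshare._generated._serialization import Deserializer

print(Deserializer.deserialize_rfc("Mon, 05 May 2025 12:30:45 GMT"))
print(Deserializer.deserialize_iso("2025-05-05T12:30:45.000Z"))
print(Deserializer.deserialize_unix(1746448245))
# All three print 2025-05-05 12:30:45+00:00.
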
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/__init__.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/__init__.py
new file mode 100644
index 00000000..b4f1dd31
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/__init__.py
@@ -0,0 +1,29 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._azure_file_storage import AzureFileStorage # type: ignore
+
+try:
+ from ._patch import __all__ as _patch_all
+ from ._patch import *
+except ImportError:
+ _patch_all = []
+from ._patch import patch_sdk as _patch_sdk
+
+__all__ = [
+ "AzureFileStorage",
+]
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
+
+_patch_sdk()
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/_azure_file_storage.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/_azure_file_storage.py
new file mode 100644
index 00000000..22c80eec
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/_azure_file_storage.py
@@ -0,0 +1,132 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from copy import deepcopy
+from typing import Any, Awaitable, Optional, Union
+from typing_extensions import Self
+
+from azure.core import AsyncPipelineClient
+from azure.core.pipeline import policies
+from azure.core.rest import AsyncHttpResponse, HttpRequest
+
+from .. import models as _models
+from .._serialization import Deserializer, Serializer
+from ._configuration import AzureFileStorageConfiguration
+from .operations import DirectoryOperations, FileOperations, ServiceOperations, ShareOperations
+
+
+class AzureFileStorage: # pylint: disable=client-accepts-api-version-keyword
+ """AzureFileStorage.
+
+ :ivar service: ServiceOperations operations
+ :vartype service: azure.storage.fileshare.aio.operations.ServiceOperations
+ :ivar share: ShareOperations operations
+ :vartype share: azure.storage.fileshare.aio.operations.ShareOperations
+ :ivar directory: DirectoryOperations operations
+ :vartype directory: azure.storage.fileshare.aio.operations.DirectoryOperations
+ :ivar file: FileOperations operations
+ :vartype file: azure.storage.fileshare.aio.operations.FileOperations
+ :param url: The URL of the service account, share, directory or file that is the target of the
+ desired operation. Required.
+ :type url: str
+ :param base_url: Service URL. Required. Default value is "".
+ :type base_url: str
+ :param file_request_intent: Valid value is "backup". Default value is None.
+ :type file_request_intent: str or ~azure.storage.fileshare.models.ShareTokenIntent
+ :param allow_trailing_dot: If true, the trailing dot will not be trimmed from the target URI.
+ Default value is None.
+ :type allow_trailing_dot: bool
+ :param allow_source_trailing_dot: If true, the trailing dot will not be trimmed from the source
+ URI. Default value is None.
+ :type allow_source_trailing_dot: bool
+ :keyword version: Specifies the version of the operation to use for this request. Default value
+ is "2025-05-05". Note that overriding this default value may result in unsupported behavior.
+ :paramtype version: str
+ :keyword file_range_write_from_url: Only update is supported: - Update: Writes the bytes
+ downloaded from the source url into the specified range. Default value is "update". Note that
+ overriding this default value may result in unsupported behavior.
+ :paramtype file_range_write_from_url: str
+ """
+
+ def __init__( # pylint: disable=missing-client-constructor-parameter-credential
+ self,
+ url: str,
+ base_url: str = "",
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ allow_source_trailing_dot: Optional[bool] = None,
+ **kwargs: Any
+ ) -> None:
+ self._config = AzureFileStorageConfiguration(
+ url=url,
+ file_request_intent=file_request_intent,
+ allow_trailing_dot=allow_trailing_dot,
+ allow_source_trailing_dot=allow_source_trailing_dot,
+ **kwargs
+ )
+ _policies = kwargs.pop("policies", None)
+ if _policies is None:
+ _policies = [
+ policies.RequestIdPolicy(**kwargs),
+ self._config.headers_policy,
+ self._config.user_agent_policy,
+ self._config.proxy_policy,
+ policies.ContentDecodePolicy(**kwargs),
+ self._config.redirect_policy,
+ self._config.retry_policy,
+ self._config.authentication_policy,
+ self._config.custom_hook_policy,
+ self._config.logging_policy,
+ policies.DistributedTracingPolicy(**kwargs),
+ policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None,
+ self._config.http_logging_policy,
+ ]
+ self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=base_url, policies=_policies, **kwargs)
+
+ client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)}
+ self._serialize = Serializer(client_models)
+ self._deserialize = Deserializer(client_models)
+ self._serialize.client_side_validation = False
+ self.service = ServiceOperations(self._client, self._config, self._serialize, self._deserialize)
+ self.share = ShareOperations(self._client, self._config, self._serialize, self._deserialize)
+ self.directory = DirectoryOperations(self._client, self._config, self._serialize, self._deserialize)
+ self.file = FileOperations(self._client, self._config, self._serialize, self._deserialize)
+
+ def _send_request(
+ self, request: HttpRequest, *, stream: bool = False, **kwargs: Any
+ ) -> Awaitable[AsyncHttpResponse]:
+ """Runs the network request through the client's chained policies.
+
+ >>> from azure.core.rest import HttpRequest
+ >>> request = HttpRequest("GET", "https://www.example.org/")
+ <HttpRequest [GET], url: 'https://www.example.org/'>
+ >>> response = await client._send_request(request)
+ <AsyncHttpResponse: 200 OK>
+
+ For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request
+
+ :param request: The network request you want to make. Required.
+ :type request: ~azure.core.rest.HttpRequest
+ :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
+ :return: The response of your network call. Does not do error handling on your response.
+ :rtype: ~azure.core.rest.AsyncHttpResponse
+ """
+
+ request_copy = deepcopy(request)
+ request_copy.url = self._client.format_url(request_copy.url)
+ return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore
+
+ async def close(self) -> None:
+ await self._client.close()
+
+ async def __aenter__(self) -> Self:
+ await self._client.__aenter__()
+ return self
+
+ async def __aexit__(self, *exc_details: Any) -> None:
+ await self._client.__aexit__(*exc_details)
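
The generated async client is normally wrapped by the public fileshare clients, but the _send_request hook documented above can be exercised directly. A hedged sketch with a placeholder account URL and no credential, so it only works against a resource that accepts the authentication already embedded in the URL (for example a SAS token):

import asyncio
from azure.core.rest import HttpRequest
from azure.storage.fileshare._generated.aio import AzureFileStorage

async def main() -> None:
    url = "https://myaccount.file.core.windows.net/myshare?sv=...&sig=..."  # placeholder SAS URL
    async with AzureFileStorage(url=url) as client:
        request = HttpRequest("GET", url + "&restype=share")
        response = await client._send_request(request)
        print(response.status_code)

asyncio.run(main())
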
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/_configuration.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/_configuration.py
new file mode 100644
index 00000000..7cc25f81
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/_configuration.py
@@ -0,0 +1,77 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, Literal, Optional, Union
+
+from azure.core.pipeline import policies
+
+from .. import models as _models
+
+VERSION = "unknown"
+
+
+class AzureFileStorageConfiguration: # pylint: disable=too-many-instance-attributes
+ """Configuration for AzureFileStorage.
+
+ Note that all parameters used to create this instance are saved as instance
+ attributes.
+
+ :param url: The URL of the service account, share, directory or file that is the target of the
+ desired operation. Required.
+ :type url: str
+ :param file_request_intent: Valid value is "backup". Default value is None.
+ :type file_request_intent: str or ~azure.storage.fileshare.models.ShareTokenIntent
+ :param allow_trailing_dot: If true, the trailing dot will not be trimmed from the target URI.
+ Default value is None.
+ :type allow_trailing_dot: bool
+ :param allow_source_trailing_dot: If true, the trailing dot will not be trimmed from the source
+ URI. Default value is None.
+ :type allow_source_trailing_dot: bool
+ :keyword version: Specifies the version of the operation to use for this request. Default value
+ is "2025-05-05". Note that overriding this default value may result in unsupported behavior.
+ :paramtype version: str
+ :keyword file_range_write_from_url: Only update is supported: - Update: Writes the bytes
+ downloaded from the source url into the specified range. Default value is "update". Note that
+ overriding this default value may result in unsupported behavior.
+ :paramtype file_range_write_from_url: str
+ """
+
+ def __init__(
+ self,
+ url: str,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ allow_source_trailing_dot: Optional[bool] = None,
+ **kwargs: Any
+ ) -> None:
+ version: Literal["2025-05-05"] = kwargs.pop("version", "2025-05-05")
+ file_range_write_from_url: Literal["update"] = kwargs.pop("file_range_write_from_url", "update")
+
+ if url is None:
+ raise ValueError("Parameter 'url' must not be None.")
+
+ self.url = url
+ self.file_request_intent = file_request_intent
+ self.allow_trailing_dot = allow_trailing_dot
+ self.allow_source_trailing_dot = allow_source_trailing_dot
+ self.version = version
+ self.file_range_write_from_url = file_range_write_from_url
+ kwargs.setdefault("sdk_moniker", "azurefilestorage/{}".format(VERSION))
+ self.polling_interval = kwargs.get("polling_interval", 30)
+ self._configure(**kwargs)
+
+ def _configure(self, **kwargs: Any) -> None:
+ self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs)
+ self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs)
+ self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
+ self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
+ self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs)
+ self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
+ self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs)
+ self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs)
+ self.authentication_policy = kwargs.get("authentication_policy")
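
Each policy assigned in _configure can be replaced by passing a keyword argument of the same name, and version/file_range_write_from_url are plain kwargs with pinned defaults. A standalone sketch (in practice AzureFileStorage constructs this configuration for you; the URL is a placeholder):

from azure.core.pipeline import policies
from azure.storage.fileshare._generated.aio._configuration import AzureFileStorageConfiguration

config = AzureFileStorageConfiguration(
    url="https://myaccount.file.core.windows.net/myshare",
    retry_policy=policies.AsyncRetryPolicy(retry_total=3),
)
print(config.version)                      # '2025-05-05'
print(type(config.retry_policy).__name__)  # 'AsyncRetryPolicy'
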
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/_patch.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/_patch.py
new file mode 100644
index 00000000..f7dd3251
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/_patch.py
@@ -0,0 +1,20 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+from typing import List
+
+__all__: List[str] = [] # Add all objects you want publicly available to users at this package level
+
+
+def patch_sdk():
+ """Do not remove from this file.
+
+ `patch_sdk` is a last resort escape hatch that allows you to do customizations
+ you can't accomplish using the techniques described in
+ https://aka.ms/azsdk/python/dpcodegen/python/customize
+ """
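
Because the package __init__ re-exports everything in _patch.__all__ and then calls patch_sdk(), this module is the supported place to layer hand-written code over the generated client. A hypothetical example of what the contents of aio/_patch.py could look like (not runnable on its own, since it relies on the package-relative import):

from typing import List

from ._azure_file_storage import AzureFileStorage

class AzureFileStorageWithDefaults(AzureFileStorage):
    """Hypothetical hand-written subclass layered on top of the generated client."""

__all__: List[str] = ["AzureFileStorageWithDefaults"]

def patch_sdk():
    pass  # nothing to do at import time in this example
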
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/__init__.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/__init__.py
new file mode 100644
index 00000000..092b7efd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/__init__.py
@@ -0,0 +1,31 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._service_operations import ServiceOperations # type: ignore
+from ._share_operations import ShareOperations # type: ignore
+from ._directory_operations import DirectoryOperations # type: ignore
+from ._file_operations import FileOperations # type: ignore
+
+from ._patch import __all__ as _patch_all
+from ._patch import *
+from ._patch import patch_sdk as _patch_sdk
+
+__all__ = [
+ "ServiceOperations",
+ "ShareOperations",
+ "DirectoryOperations",
+ "FileOperations",
+]
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
+_patch_sdk()
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/_directory_operations.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/_directory_operations.py
new file mode 100644
index 00000000..83a84985
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/_directory_operations.py
@@ -0,0 +1,1056 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import sys
+from typing import Any, Callable, Dict, List, Literal, Optional, TypeVar, Union
+
+from azure.core import AsyncPipelineClient
+from azure.core.exceptions import (
+ ClientAuthenticationError,
+ HttpResponseError,
+ ResourceExistsError,
+ ResourceNotFoundError,
+ ResourceNotModifiedError,
+ map_error,
+)
+from azure.core.pipeline import PipelineResponse
+from azure.core.rest import AsyncHttpResponse, HttpRequest
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.core.utils import case_insensitive_dict
+
+from ... import models as _models
+from ..._serialization import Deserializer, Serializer
+from ...operations._directory_operations import (
+ build_create_request,
+ build_delete_request,
+ build_force_close_handles_request,
+ build_get_properties_request,
+ build_list_files_and_directories_segment_request,
+ build_list_handles_request,
+ build_rename_request,
+ build_set_metadata_request,
+ build_set_properties_request,
+)
+from .._configuration import AzureFileStorageConfiguration
+
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+
+class DirectoryOperations:
+ """
+ .. warning::
+ **DO NOT** instantiate this class directly.
+
+ Instead, you should access the following operations through
+ :class:`~azure.storage.fileshare.aio.AzureFileStorage`'s
+ :attr:`directory` attribute.
+ """
+
+ models = _models
+
+ def __init__(self, *args, **kwargs) -> None:
+ input_args = list(args)
+ self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
+ self._config: AzureFileStorageConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
+ self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
+ self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+ @distributed_trace_async
+ async def create(
+ self,
+ timeout: Optional[int] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ file_permission_key: Optional[str] = None,
+ file_attributes: str = "none",
+ file_creation_time: str = "now",
+ file_last_write_time: str = "now",
+ file_change_time: Optional[str] = None,
+ owner: Optional[str] = None,
+ group: Optional[str] = None,
+ file_mode: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Creates a new directory under the specified share or parent directory.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param metadata: A name-value pair to associate with a file storage object. Default value is
+ None.
+ :type metadata: dict[str, str]
+ :param file_permission: If specified the permission (security descriptor) shall be set for the
+ directory/file. This header can be used if Permission size is <= 8KB, else
+ x-ms-file-permission-key header shall be used. Default value: Inherit. If SDDL is specified as
+ input, it must have owner, group and dacl. Note: Only one of the x-ms-file-permission or
+ x-ms-file-permission-key should be specified. Default value is "inherit".
+ :type file_permission: str
+ :param file_permission_format: Optional. Available for version 2023-06-01 and later. Specifies
+ the format in which the permission is returned. Acceptable values are SDDL or binary. If
+ x-ms-file-permission-format is unspecified or explicitly set to SDDL, the permission is
+ returned in SDDL format. If x-ms-file-permission-format is explicitly set to binary, the
+ permission is returned as a base64 string representing the binary encoding of the permission.
+ Known values are: "Sddl" and "Binary". Default value is None.
+ :type file_permission_format: str or ~azure.storage.fileshare.models.FilePermissionFormat
+ :param file_permission_key: Key of the permission to be set for the directory/file. Note: Only
+ one of the x-ms-file-permission or x-ms-file-permission-key should be specified. Default value
+ is None.
+ :type file_permission_key: str
+ :param file_attributes: If specified, the provided file attributes shall be set. Default value:
+ ‘Archive’ for file and ‘Directory’ for directory. ‘None’ can also be specified as default.
+ Default value is "none".
+ :type file_attributes: str
+ :param file_creation_time: Creation time for the file/directory. Default value: Now. Default
+ value is "now".
+ :type file_creation_time: str
+ :param file_last_write_time: Last write time for the file/directory. Default value: Now.
+ Default value is "now".
+ :type file_last_write_time: str
+ :param file_change_time: Change time for the file/directory. Default value: Now. Default value
+ is None.
+ :type file_change_time: str
+ :param owner: Optional, NFS only. The owner of the file or directory. Default value is None.
+ :type owner: str
+ :param group: Optional, NFS only. The owning group of the file or directory. Default value is
+ None.
+ :type group: str
+ :param file_mode: Optional, NFS only. The file mode of the file or directory. Default value is
+ None.
+ :type file_mode: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_create_request(
+ url=self._config.url,
+ timeout=timeout,
+ metadata=metadata,
+ file_permission=file_permission,
+ file_permission_format=file_permission_format,
+ file_permission_key=file_permission_key,
+ file_attributes=file_attributes,
+ file_creation_time=file_creation_time,
+ file_last_write_time=file_last_write_time,
+ file_change_time=file_change_time,
+ owner=owner,
+ group=group,
+ file_mode=file_mode,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-request-server-encrypted")
+ )
+ response_headers["x-ms-file-permission-key"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-permission-key")
+ )
+ response_headers["x-ms-file-attributes"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-attributes")
+ )
+ response_headers["x-ms-file-creation-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-creation-time")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-file-change-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-change-time")
+ )
+ response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id"))
+ response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id"))
+ response_headers["x-ms-mode"] = self._deserialize("str", response.headers.get("x-ms-mode"))
+ response_headers["x-ms-owner"] = self._deserialize("str", response.headers.get("x-ms-owner"))
+ response_headers["x-ms-group"] = self._deserialize("str", response.headers.get("x-ms-group"))
+ response_headers["x-ms-file-file-type"] = self._deserialize("str", response.headers.get("x-ms-file-file-type"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
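
Most of the parameters above correspond directly to x-ms-file-* request headers on the Create Directory call. A hedged sketch of invoking the generated operation; the placeholder URL must carry its own authentication (for example a SAS token), since this low-level client is built here without a credential, and the public ShareDirectoryClient is the intended entry point:

from azure.storage.fileshare._generated.aio import AzureFileStorage

async def make_directory() -> None:
    url = "https://myaccount.file.core.windows.net/myshare/newdir?sv=...&sig=..."  # placeholder SAS URL
    async with AzureFileStorage(url=url) as client:
        await client.directory.create(metadata={"project": "demo"}, timeout=30)
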
+
+ @distributed_trace_async
+ async def get_properties(
+ self, sharesnapshot: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Returns all system properties for the specified directory, and can also be used to check the
+ existence of a directory. The data returned does not include the files in the directory or any
+ subdirectories.
+
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_get_properties_request(
+ url=self._config.url,
+ sharesnapshot=sharesnapshot,
+ timeout=timeout,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta"))
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-server-encrypted")
+ )
+ response_headers["x-ms-file-attributes"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-attributes")
+ )
+ response_headers["x-ms-file-creation-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-creation-time")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-file-change-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-change-time")
+ )
+ response_headers["x-ms-file-permission-key"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-permission-key")
+ )
+ response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id"))
+ response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id"))
+ response_headers["x-ms-mode"] = self._deserialize("str", response.headers.get("x-ms-mode"))
+ response_headers["x-ms-owner"] = self._deserialize("str", response.headers.get("x-ms-owner"))
+ response_headers["x-ms-group"] = self._deserialize("str", response.headers.get("x-ms-group"))
+ response_headers["x-ms-file-file-type"] = self._deserialize("str", response.headers.get("x-ms-file-file-type"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
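+    # Illustrative usage, not part of the generated code: because a missing directory surfaces as
+    # the 404-mapped ResourceNotFoundError, get_properties doubles as an existence probe. Assumes an
+    # already-constructed async AzureFileStorage client named `client` whose URL points at the
+    # target directory and which exposes this operation group as `client.directory`:
+    #
+    #     from azure.core.exceptions import ResourceNotFoundError
+    #     try:
+    #         await client.directory.get_properties()
+    #         directory_exists = True
+    #     except ResourceNotFoundError:
+    #         directory_exists = False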
+
+ @distributed_trace_async
+ async def delete(self, timeout: Optional[int] = None, **kwargs: Any) -> None:
+ # pylint: disable=line-too-long
+ """Removes the specified empty directory. Note that the directory must be empty before it can be
+ deleted.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_delete_request(
+ url=self._config.url,
+ timeout=timeout,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
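+    # Minimal sketch of delete, using the same `client` assumption as above. The operation only
+    # succeeds (202) on an empty directory, so callers typically remove children first; a non-empty
+    # directory comes back through the 409-mapped ResourceExistsError:
+    #
+    #     await client.directory.delete(timeout=30)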
+
+ @distributed_trace_async
+ async def set_properties(
+ self,
+ timeout: Optional[int] = None,
+ file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ file_permission_key: Optional[str] = None,
+ file_attributes: str = "none",
+ file_creation_time: str = "now",
+ file_last_write_time: str = "now",
+ file_change_time: Optional[str] = None,
+ owner: Optional[str] = None,
+ group: Optional[str] = None,
+ file_mode: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Sets properties on the directory.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+        :param file_permission: If specified, the permission (security descriptor) shall be set for the
+         directory/file. This header can be used if the permission size is <= 8KB; otherwise the
+         x-ms-file-permission-key header shall be used. Default value: Inherit. If SDDL is specified as
+         input, it must have owner, group and dacl. Note: Only one of the x-ms-file-permission or
+         x-ms-file-permission-key should be specified. Default value is "inherit".
+ :type file_permission: str
+ :param file_permission_format: Optional. Available for version 2023-06-01 and later. Specifies
+ the format in which the permission is returned. Acceptable values are SDDL or binary. If
+ x-ms-file-permission-format is unspecified or explicitly set to SDDL, the permission is
+ returned in SDDL format. If x-ms-file-permission-format is explicitly set to binary, the
+ permission is returned as a base64 string representing the binary encoding of the permission.
+ Known values are: "Sddl" and "Binary". Default value is None.
+ :type file_permission_format: str or ~azure.storage.fileshare.models.FilePermissionFormat
+ :param file_permission_key: Key of the permission to be set for the directory/file. Note: Only
+ one of the x-ms-file-permission or x-ms-file-permission-key should be specified. Default value
+ is None.
+ :type file_permission_key: str
+ :param file_attributes: If specified, the provided file attributes shall be set. Default value:
+ ‘Archive’ for file and ‘Directory’ for directory. ‘None’ can also be specified as default.
+ Default value is "none".
+ :type file_attributes: str
+ :param file_creation_time: Creation time for the file/directory. Default value: Now. Default
+ value is "now".
+ :type file_creation_time: str
+ :param file_last_write_time: Last write time for the file/directory. Default value: Now.
+ Default value is "now".
+ :type file_last_write_time: str
+ :param file_change_time: Change time for the file/directory. Default value: Now. Default value
+ is None.
+ :type file_change_time: str
+ :param owner: Optional, NFS only. The owner of the file or directory. Default value is None.
+ :type owner: str
+ :param group: Optional, NFS only. The owning group of the file or directory. Default value is
+ None.
+ :type group: str
+ :param file_mode: Optional, NFS only. The file mode of the file or directory. Default value is
+ None.
+ :type file_mode: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
+ comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_set_properties_request(
+ url=self._config.url,
+ timeout=timeout,
+ file_permission=file_permission,
+ file_permission_format=file_permission_format,
+ file_permission_key=file_permission_key,
+ file_attributes=file_attributes,
+ file_creation_time=file_creation_time,
+ file_last_write_time=file_last_write_time,
+ file_change_time=file_change_time,
+ owner=owner,
+ group=group,
+ file_mode=file_mode,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-request-server-encrypted")
+ )
+ response_headers["x-ms-file-permission-key"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-permission-key")
+ )
+ response_headers["x-ms-file-attributes"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-attributes")
+ )
+ response_headers["x-ms-file-creation-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-creation-time")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-file-change-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-change-time")
+ )
+ response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id"))
+ response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id"))
+ response_headers["x-ms-mode"] = self._deserialize("str", response.headers.get("x-ms-mode"))
+ response_headers["x-ms-owner"] = self._deserialize("str", response.headers.get("x-ms-owner"))
+ response_headers["x-ms-group"] = self._deserialize("str", response.headers.get("x-ms-group"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
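+    # Minimal sketch of set_properties (same `client` assumption). The string sentinels "inherit",
+    # "none" and "now" are sent as-is, so only the values a caller overrides change; the timestamps
+    # below are illustrative and use the ISO 8601 form the service documents:
+    #
+    #     await client.directory.set_properties(
+    #         file_creation_time="2024-01-01T00:00:00.0000000Z",
+    #         file_last_write_time="2024-06-01T00:00:00.0000000Z",
+    #     )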
+
+ @distributed_trace_async
+ async def set_metadata(
+ self, timeout: Optional[int] = None, metadata: Optional[Dict[str, str]] = None, **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Updates user defined metadata for the specified directory.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param metadata: A name-value pair to associate with a file storage object. Default value is
+ None.
+ :type metadata: dict[str, str]
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
+ comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_set_metadata_request(
+ url=self._config.url,
+ timeout=timeout,
+ metadata=metadata,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-request-server-encrypted")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
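+    # Minimal sketch of set_metadata (same `client` assumption). The service replaces the full
+    # metadata set rather than merging, so the complete mapping is sent on every call:
+    #
+    #     await client.directory.set_metadata(metadata={"project": "demo", "owner": "ops"})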
+
+ @distributed_trace_async
+ async def list_files_and_directories_segment(
+ self,
+ prefix: Optional[str] = None,
+ sharesnapshot: Optional[str] = None,
+ marker: Optional[str] = None,
+ maxresults: Optional[int] = None,
+ timeout: Optional[int] = None,
+ include: Optional[List[Union[str, _models.ListFilesIncludeType]]] = None,
+ include_extended_info: Optional[bool] = None,
+ **kwargs: Any
+ ) -> _models.ListFilesAndDirectoriesSegmentResponse:
+ # pylint: disable=line-too-long
+ """Returns a list of files or directories under the specified share or directory. It lists the
+ contents only for a single level of the directory hierarchy.
+
+ :param prefix: Filters the results to return only entries whose name begins with the specified
+ prefix. Default value is None.
+ :type prefix: str
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param marker: A string value that identifies the portion of the list to be returned with the
+ next list operation. The operation returns a marker value within the response body if the list
+ returned was not complete. The marker value may then be used in a subsequent call to request
+ the next set of list items. The marker value is opaque to the client. Default value is None.
+ :type marker: str
+ :param maxresults: Specifies the maximum number of entries to return. If the request does not
+ specify maxresults, or specifies a value greater than 5,000, the server will return up to 5,000
+ items. Default value is None.
+ :type maxresults: int
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param include: Include this parameter to specify one or more datasets to include in the
+ response. Default value is None.
+ :type include: list[str or ~azure.storage.fileshare.models.ListFilesIncludeType]
+ :param include_extended_info: Include extended information. Default value is None.
+ :type include_extended_info: bool
+ :return: ListFilesAndDirectoriesSegmentResponse or the result of cls(response)
+ :rtype: ~azure.storage.fileshare.models.ListFilesAndDirectoriesSegmentResponse
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
+ comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list"))
+ cls: ClsType[_models.ListFilesAndDirectoriesSegmentResponse] = kwargs.pop("cls", None)
+
+ _request = build_list_files_and_directories_segment_request(
+ url=self._config.url,
+ prefix=prefix,
+ sharesnapshot=sharesnapshot,
+ marker=marker,
+ maxresults=maxresults,
+ timeout=timeout,
+ include=include,
+ include_extended_info=include_extended_info,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ deserialized = self._deserialize("ListFilesAndDirectoriesSegmentResponse", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
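+    # Minimal pagination sketch for list_files_and_directories_segment (same `client` assumption;
+    # the segment and next_marker attribute names are assumed from the generated response models).
+    # The marker returned by one call is fed into the next until it comes back empty:
+    #
+    #     entries, marker = [], None
+    #     while True:
+    #         page = await client.directory.list_files_and_directories_segment(
+    #             prefix="logs-", maxresults=100, marker=marker
+    #         )
+    #         entries.extend(page.segment.directory_items)
+    #         entries.extend(page.segment.file_items)
+    #         marker = page.next_marker
+    #         if not marker:
+    #             break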
+
+ @distributed_trace_async
+ async def list_handles(
+ self,
+ marker: Optional[str] = None,
+ maxresults: Optional[int] = None,
+ timeout: Optional[int] = None,
+ sharesnapshot: Optional[str] = None,
+ recursive: Optional[bool] = None,
+ **kwargs: Any
+ ) -> _models.ListHandlesResponse:
+ # pylint: disable=line-too-long
+        """Lists handles for the directory.
+
+ :param marker: A string value that identifies the portion of the list to be returned with the
+ next list operation. The operation returns a marker value within the response body if the list
+ returned was not complete. The marker value may then be used in a subsequent call to request
+ the next set of list items. The marker value is opaque to the client. Default value is None.
+ :type marker: str
+ :param maxresults: Specifies the maximum number of entries to return. If the request does not
+ specify maxresults, or specifies a value greater than 5,000, the server will return up to 5,000
+ items. Default value is None.
+ :type maxresults: int
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+        :param recursive: Specifies whether the operation should apply to the directory specified in the
+         URI, its files, its subdirectories and their files. Default value is None.
+ :type recursive: bool
+ :return: ListHandlesResponse or the result of cls(response)
+ :rtype: ~azure.storage.fileshare.models.ListHandlesResponse
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["listhandles"] = kwargs.pop("comp", _params.pop("comp", "listhandles"))
+ cls: ClsType[_models.ListHandlesResponse] = kwargs.pop("cls", None)
+
+ _request = build_list_handles_request(
+ url=self._config.url,
+ marker=marker,
+ maxresults=maxresults,
+ timeout=timeout,
+ sharesnapshot=sharesnapshot,
+ recursive=recursive,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ deserialized = self._deserialize("ListHandlesResponse", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
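+    # Minimal sketch of list_handles (same `client` assumption; the handle_list attribute and the
+    # handle_id field are assumed from the generated ListHandlesResponse / HandleItem models).
+    # recursive=True covers the whole subtree instead of this single directory level:
+    #
+    #     resp = await client.directory.list_handles(recursive=True, maxresults=50)
+    #     open_handle_ids = [h.handle_id for h in (resp.handle_list or [])]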
+
+ @distributed_trace_async
+ async def force_close_handles(
+ self,
+ handle_id: str,
+ timeout: Optional[int] = None,
+ marker: Optional[str] = None,
+ sharesnapshot: Optional[str] = None,
+ recursive: Optional[bool] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+        """Closes all handles open for the given directory.
+
+ :param handle_id: Specifies handle ID opened on the file or directory to be closed. Asterisk
+ (‘*’) is a wildcard that specifies all handles. Required.
+ :type handle_id: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param marker: A string value that identifies the portion of the list to be returned with the
+ next list operation. The operation returns a marker value within the response body if the list
+ returned was not complete. The marker value may then be used in a subsequent call to request
+ the next set of list items. The marker value is opaque to the client. Default value is None.
+ :type marker: str
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+        :param recursive: Specifies whether the operation should apply to the directory specified in the
+         URI, its files, its subdirectories and their files. Default value is None.
+ :type recursive: bool
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["forceclosehandles"] = kwargs.pop("comp", _params.pop("comp", "forceclosehandles"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_force_close_handles_request(
+ url=self._config.url,
+ handle_id=handle_id,
+ timeout=timeout,
+ marker=marker,
+ sharesnapshot=sharesnapshot,
+ recursive=recursive,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-marker"] = self._deserialize("str", response.headers.get("x-ms-marker"))
+ response_headers["x-ms-number-of-handles-closed"] = self._deserialize(
+ "int", response.headers.get("x-ms-number-of-handles-closed")
+ )
+ response_headers["x-ms-number-of-handles-failed"] = self._deserialize(
+ "int", response.headers.get("x-ms-number-of-handles-failed")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
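+    # Minimal sketch of force_close_handles (same `client` assumption). The "*" wildcard closes all
+    # handles; when the service cannot finish in one pass it returns x-ms-marker, surfaced here via
+    # a `cls` callback that hands back the response headers so the loop can resume:
+    #
+    #     marker = None
+    #     while True:
+    #         headers = await client.directory.force_close_handles(
+    #             handle_id="*", marker=marker, recursive=True,
+    #             cls=lambda resp, result, hdrs: hdrs,
+    #         )
+    #         marker = headers.get("x-ms-marker")
+    #         if not marker:
+    #             break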
+
+ @distributed_trace_async
+ async def rename(
+ self,
+ rename_source: str,
+ timeout: Optional[int] = None,
+ replace_if_exists: Optional[bool] = None,
+ ignore_read_only: Optional[bool] = None,
+ file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ file_permission_key: Optional[str] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ source_lease_access_conditions: Optional[_models.SourceLeaseAccessConditions] = None,
+ destination_lease_access_conditions: Optional[_models.DestinationLeaseAccessConditions] = None,
+ copy_file_smb_info: Optional[_models.CopyFileSmbInfo] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Renames a directory.
+
+        :param rename_source: Specifies the URI-style path of the source file, up to 2 KB in
+         length. Required.
+ :type rename_source: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+        :param replace_if_exists: Optional. A boolean value that specifies, when the destination file
+         already exists, whether this request will overwrite it. If true, the rename will succeed and
+         will overwrite the destination file. If not provided, or if false and the destination file
+         does exist, the request will not overwrite the destination file. If provided and the
+         destination file doesn’t exist, the rename will succeed. Note: This value does not override
+         the x-ms-file-copy-ignore-read-only header value. Default value is None.
+ :type replace_if_exists: bool
+ :param ignore_read_only: Optional. A boolean value that specifies whether the ReadOnly
+ attribute on a preexisting destination file should be respected. If true, the rename will
+ succeed, otherwise, a previous file at the destination with the ReadOnly attribute set will
+ cause the rename to fail. Default value is None.
+ :type ignore_read_only: bool
+        :param file_permission: If specified, the permission (security descriptor) shall be set for the
+         directory/file. This header can be used if the permission size is <= 8KB; otherwise the
+         x-ms-file-permission-key header shall be used. Default value: Inherit. If SDDL is specified as
+         input, it must have owner, group and dacl. Note: Only one of the x-ms-file-permission or
+         x-ms-file-permission-key should be specified. Default value is "inherit".
+ :type file_permission: str
+ :param file_permission_format: Optional. Available for version 2023-06-01 and later. Specifies
+ the format in which the permission is returned. Acceptable values are SDDL or binary. If
+ x-ms-file-permission-format is unspecified or explicitly set to SDDL, the permission is
+ returned in SDDL format. If x-ms-file-permission-format is explicitly set to binary, the
+ permission is returned as a base64 string representing the binary encoding of the permission.
+ Known values are: "Sddl" and "Binary". Default value is None.
+ :type file_permission_format: str or ~azure.storage.fileshare.models.FilePermissionFormat
+ :param file_permission_key: Key of the permission to be set for the directory/file. Note: Only
+ one of the x-ms-file-permission or x-ms-file-permission-key should be specified. Default value
+ is None.
+ :type file_permission_key: str
+ :param metadata: A name-value pair to associate with a file storage object. Default value is
+ None.
+ :type metadata: dict[str, str]
+ :param source_lease_access_conditions: Parameter group. Default value is None.
+ :type source_lease_access_conditions:
+ ~azure.storage.fileshare.models.SourceLeaseAccessConditions
+ :param destination_lease_access_conditions: Parameter group. Default value is None.
+ :type destination_lease_access_conditions:
+ ~azure.storage.fileshare.models.DestinationLeaseAccessConditions
+ :param copy_file_smb_info: Parameter group. Default value is None.
+ :type copy_file_smb_info: ~azure.storage.fileshare.models.CopyFileSmbInfo
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
+ comp: Literal["rename"] = kwargs.pop("comp", _params.pop("comp", "rename"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _source_lease_id = None
+ _destination_lease_id = None
+ _file_attributes = None
+ _file_creation_time = None
+ _file_last_write_time = None
+ _file_change_time = None
+ if source_lease_access_conditions is not None:
+ _source_lease_id = source_lease_access_conditions.source_lease_id
+ if destination_lease_access_conditions is not None:
+ _destination_lease_id = destination_lease_access_conditions.destination_lease_id
+ if copy_file_smb_info is not None:
+ _file_attributes = copy_file_smb_info.file_attributes
+ _file_change_time = copy_file_smb_info.file_change_time
+ _file_creation_time = copy_file_smb_info.file_creation_time
+ _file_last_write_time = copy_file_smb_info.file_last_write_time
+
+ _request = build_rename_request(
+ url=self._config.url,
+ rename_source=rename_source,
+ timeout=timeout,
+ replace_if_exists=replace_if_exists,
+ ignore_read_only=ignore_read_only,
+ source_lease_id=_source_lease_id,
+ destination_lease_id=_destination_lease_id,
+ file_attributes=_file_attributes,
+ file_creation_time=_file_creation_time,
+ file_last_write_time=_file_last_write_time,
+ file_change_time=_file_change_time,
+ file_permission=file_permission,
+ file_permission_format=file_permission_format,
+ file_permission_key=file_permission_key,
+ metadata=metadata,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ allow_source_trailing_dot=self._config.allow_source_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-request-server-encrypted")
+ )
+ response_headers["x-ms-file-permission-key"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-permission-key")
+ )
+ response_headers["x-ms-file-attributes"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-attributes")
+ )
+ response_headers["x-ms-file-creation-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-creation-time")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-file-change-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-change-time")
+ )
+ response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id"))
+ response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
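+    # Minimal sketch of rename (same `client` assumption; the destination is whatever directory the
+    # client URL points at, and the source URI below is illustrative). replace_if_exists only has an
+    # effect when the destination already exists:
+    #
+    #     await client.directory.rename(
+    #         rename_source="https://myaccount.file.core.windows.net/myshare/old-dir",
+    #         replace_if_exists=False,
+    #     )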
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/_file_operations.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/_file_operations.py
new file mode 100644
index 00000000..93f91f24
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/_file_operations.py
@@ -0,0 +1,2518 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import sys
+from typing import Any, AsyncIterator, Callable, Dict, IO, Literal, Optional, TypeVar, Union
+
+from azure.core import AsyncPipelineClient
+from azure.core.exceptions import (
+ ClientAuthenticationError,
+ HttpResponseError,
+ ResourceExistsError,
+ ResourceNotFoundError,
+ ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
+ map_error,
+)
+from azure.core.pipeline import PipelineResponse
+from azure.core.rest import AsyncHttpResponse, HttpRequest
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.core.utils import case_insensitive_dict
+
+from ... import models as _models
+from ..._serialization import Deserializer, Serializer
+from ...operations._file_operations import (
+ build_abort_copy_request,
+ build_acquire_lease_request,
+ build_break_lease_request,
+ build_change_lease_request,
+ build_create_hard_link_request,
+ build_create_request,
+ build_create_symbolic_link_request,
+ build_delete_request,
+ build_download_request,
+ build_force_close_handles_request,
+ build_get_properties_request,
+ build_get_range_list_request,
+ build_get_symbolic_link_request,
+ build_list_handles_request,
+ build_release_lease_request,
+ build_rename_request,
+ build_set_http_headers_request,
+ build_set_metadata_request,
+ build_start_copy_request,
+ build_upload_range_from_url_request,
+ build_upload_range_request,
+)
+from .._configuration import AzureFileStorageConfiguration
+
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+
+class FileOperations: # pylint: disable=too-many-public-methods
+ """
+ .. warning::
+ **DO NOT** instantiate this class directly.
+
+ Instead, you should access the following operations through
+ :class:`~azure.storage.fileshare.aio.AzureFileStorage`'s
+ :attr:`file` attribute.
+ """
+
+ models = _models
+
+ def __init__(self, *args, **kwargs) -> None:
+ input_args = list(args)
+ self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
+ self._config: AzureFileStorageConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
+ self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
+ self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+ @distributed_trace_async
+ async def create(
+ self,
+ file_content_length: int,
+ timeout: Optional[int] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ file_permission_key: Optional[str] = None,
+ file_attributes: str = "none",
+ file_creation_time: str = "now",
+ file_last_write_time: str = "now",
+ file_change_time: Optional[str] = None,
+ owner: Optional[str] = None,
+ group: Optional[str] = None,
+ file_mode: Optional[str] = None,
+ nfs_file_type: Optional[Union[str, _models.NfsFileType]] = None,
+ file_http_headers: Optional[_models.FileHTTPHeaders] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Creates a new file or replaces a file. Note it only initializes the file with no content.
+
+ :param file_content_length: Specifies the maximum size for the file, up to 4 TB. Required.
+ :type file_content_length: int
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param metadata: A name-value pair to associate with a file storage object. Default value is
+ None.
+ :type metadata: dict[str, str]
+        :param file_permission: If specified, the permission (security descriptor) shall be set for the
+         directory/file. This header can be used if the permission size is <= 8KB; otherwise the
+         x-ms-file-permission-key header shall be used. Default value: Inherit. If SDDL is specified as
+         input, it must have owner, group and dacl. Note: Only one of the x-ms-file-permission or
+         x-ms-file-permission-key should be specified. Default value is "inherit".
+ :type file_permission: str
+ :param file_permission_format: Optional. Available for version 2023-06-01 and later. Specifies
+ the format in which the permission is returned. Acceptable values are SDDL or binary. If
+ x-ms-file-permission-format is unspecified or explicitly set to SDDL, the permission is
+ returned in SDDL format. If x-ms-file-permission-format is explicitly set to binary, the
+ permission is returned as a base64 string representing the binary encoding of the permission.
+ Known values are: "Sddl" and "Binary". Default value is None.
+ :type file_permission_format: str or ~azure.storage.fileshare.models.FilePermissionFormat
+ :param file_permission_key: Key of the permission to be set for the directory/file. Note: Only
+ one of the x-ms-file-permission or x-ms-file-permission-key should be specified. Default value
+ is None.
+ :type file_permission_key: str
+ :param file_attributes: If specified, the provided file attributes shall be set. Default value:
+ ‘Archive’ for file and ‘Directory’ for directory. ‘None’ can also be specified as default.
+ Default value is "none".
+ :type file_attributes: str
+ :param file_creation_time: Creation time for the file/directory. Default value: Now. Default
+ value is "now".
+ :type file_creation_time: str
+ :param file_last_write_time: Last write time for the file/directory. Default value: Now.
+ Default value is "now".
+ :type file_last_write_time: str
+ :param file_change_time: Change time for the file/directory. Default value: Now. Default value
+ is None.
+ :type file_change_time: str
+ :param owner: Optional, NFS only. The owner of the file or directory. Default value is None.
+ :type owner: str
+ :param group: Optional, NFS only. The owning group of the file or directory. Default value is
+ None.
+ :type group: str
+ :param file_mode: Optional, NFS only. The file mode of the file or directory. Default value is
+ None.
+ :type file_mode: str
+ :param nfs_file_type: Optional, NFS only. Type of the file or directory. Known values are:
+ "Regular", "Directory", and "SymLink". Default value is None.
+ :type nfs_file_type: str or ~azure.storage.fileshare.models.NfsFileType
+ :param file_http_headers: Parameter group. Default value is None.
+ :type file_http_headers: ~azure.storage.fileshare.models.FileHTTPHeaders
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = kwargs.pop("params", {}) or {}
+
+ file_type_constant: Literal["file"] = kwargs.pop("file_type_constant", _headers.pop("x-ms-type", "file"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _file_content_type = None
+ _file_content_encoding = None
+ _file_content_language = None
+ _file_cache_control = None
+ _file_content_md5 = None
+ _file_content_disposition = None
+ _lease_id = None
+ if file_http_headers is not None:
+ _file_cache_control = file_http_headers.file_cache_control
+ _file_content_disposition = file_http_headers.file_content_disposition
+ _file_content_encoding = file_http_headers.file_content_encoding
+ _file_content_language = file_http_headers.file_content_language
+ _file_content_md5 = file_http_headers.file_content_md5
+ _file_content_type = file_http_headers.file_content_type
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_create_request(
+ url=self._config.url,
+ file_content_length=file_content_length,
+ timeout=timeout,
+ file_content_type=_file_content_type,
+ file_content_encoding=_file_content_encoding,
+ file_content_language=_file_content_language,
+ file_cache_control=_file_cache_control,
+ file_content_md5=_file_content_md5,
+ file_content_disposition=_file_content_disposition,
+ metadata=metadata,
+ file_permission=file_permission,
+ file_permission_format=file_permission_format,
+ file_permission_key=file_permission_key,
+ file_attributes=file_attributes,
+ file_creation_time=file_creation_time,
+ file_last_write_time=file_last_write_time,
+ file_change_time=file_change_time,
+ lease_id=_lease_id,
+ owner=owner,
+ group=group,
+ file_mode=file_mode,
+ nfs_file_type=nfs_file_type,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ file_type_constant=file_type_constant,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-request-server-encrypted")
+ )
+ response_headers["x-ms-file-permission-key"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-permission-key")
+ )
+ response_headers["x-ms-file-attributes"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-attributes")
+ )
+ response_headers["x-ms-file-creation-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-creation-time")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-file-change-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-change-time")
+ )
+ response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id"))
+ response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id"))
+ response_headers["x-ms-mode"] = self._deserialize("str", response.headers.get("x-ms-mode"))
+ response_headers["x-ms-owner"] = self._deserialize("str", response.headers.get("x-ms-owner"))
+ response_headers["x-ms-group"] = self._deserialize("str", response.headers.get("x-ms-group"))
+ response_headers["x-ms-file-file-type"] = self._deserialize("str", response.headers.get("x-ms-file-file-type"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
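+    # Minimal sketch of create (assumes an async AzureFileStorage client named `client` whose URL
+    # points at the target file path; per the class docstring the operation group is exposed as
+    # `client.file`). The call only reserves file_content_length bytes of capacity; content is
+    # written afterwards with upload_range:
+    #
+    #     await client.file.create(
+    #         file_content_length=1024,
+    #         file_http_headers=_models.FileHTTPHeaders(file_content_type="text/plain"),
+    #     )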
+
+ @distributed_trace_async
+ async def download(
+ self,
+ timeout: Optional[int] = None,
+ range: Optional[str] = None,
+ range_get_content_md5: Optional[bool] = None,
+ structured_body_type: Optional[str] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> AsyncIterator[bytes]:
+ # pylint: disable=line-too-long
+ """Reads or downloads a file from the system, including its metadata and properties.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param range: Return file data only from the specified byte range. Default value is None.
+ :type range: str
+ :param range_get_content_md5: When this header is set to true and specified together with the
+ Range header, the service returns the MD5 hash for the range, as long as the range is less than
+ or equal to 4 MB in size. Default value is None.
+ :type range_get_content_md5: bool
+        :param structured_body_type: Specifies that the response content should be returned as a
+         structured message, and specifies the message schema version and properties. Default value is
+         None.
+ :type structured_body_type: str
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: AsyncIterator[bytes] or the result of cls(response)
+ :rtype: AsyncIterator[bytes]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_download_request(
+ url=self._config.url,
+ timeout=timeout,
+ range=range,
+ range_get_content_md5=range_get_content_md5,
+ structured_body_type=structured_body_type,
+ lease_id=_lease_id,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 206]:
+ try:
+ await response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta"))
+ response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length"))
+ response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type"))
+ response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range"))
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
+ response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding"))
+ response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control"))
+ response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition"))
+ response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-copy-completion-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-copy-completion-time")
+ )
+ response_headers["x-ms-copy-status-description"] = self._deserialize(
+ "str", response.headers.get("x-ms-copy-status-description")
+ )
+ response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id"))
+ response_headers["x-ms-copy-progress"] = self._deserialize("str", response.headers.get("x-ms-copy-progress"))
+ response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source"))
+ response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status"))
+ response_headers["x-ms-content-md5"] = self._deserialize("bytearray", response.headers.get("x-ms-content-md5"))
+ response_headers["x-ms-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-server-encrypted")
+ )
+ response_headers["x-ms-file-attributes"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-attributes")
+ )
+ response_headers["x-ms-file-creation-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-creation-time")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-file-change-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-change-time")
+ )
+ response_headers["x-ms-file-permission-key"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-permission-key")
+ )
+ response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id"))
+ response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id"))
+ response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration"))
+ response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state"))
+ response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status"))
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
+ response_headers["x-ms-structured-content-length"] = self._deserialize(
+ "int", response.headers.get("x-ms-structured-content-length")
+ )
+ response_headers["x-ms-mode"] = self._deserialize("str", response.headers.get("x-ms-mode"))
+ response_headers["x-ms-owner"] = self._deserialize("str", response.headers.get("x-ms-owner"))
+ response_headers["x-ms-group"] = self._deserialize("str", response.headers.get("x-ms-group"))
+ response_headers["x-ms-link-count"] = self._deserialize("int", response.headers.get("x-ms-link-count"))
+
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
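+    # Minimal sketch of download (same `client.file` assumption). The return value is the raw
+    # download stream, consumed as an async iterator of byte chunks; `range` uses standard HTTP
+    # byte-range syntax:
+    #
+    #     stream = await client.file.download(range="bytes=0-1023")
+    #     data = b""
+    #     async for chunk in stream:
+    #         data += chunk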
+
+ @distributed_trace_async
+ async def get_properties(
+ self,
+ sharesnapshot: Optional[str] = None,
+ timeout: Optional[int] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Returns all user-defined metadata, standard HTTP properties, and system properties for the
+ file. It does not return the content of the file.
+
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_get_properties_request(
+ url=self._config.url,
+ sharesnapshot=sharesnapshot,
+ timeout=timeout,
+ lease_id=_lease_id,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta"))
+ response_headers["x-ms-type"] = self._deserialize("str", response.headers.get("x-ms-type"))
+ response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length"))
+ response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type"))
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
+ response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding"))
+ response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control"))
+ response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition"))
+ response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-copy-completion-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-copy-completion-time")
+ )
+ response_headers["x-ms-copy-status-description"] = self._deserialize(
+ "str", response.headers.get("x-ms-copy-status-description")
+ )
+ response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id"))
+ response_headers["x-ms-copy-progress"] = self._deserialize("str", response.headers.get("x-ms-copy-progress"))
+ response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source"))
+ response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status"))
+ response_headers["x-ms-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-server-encrypted")
+ )
+ response_headers["x-ms-file-attributes"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-attributes")
+ )
+ response_headers["x-ms-file-creation-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-creation-time")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-file-change-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-change-time")
+ )
+ response_headers["x-ms-file-permission-key"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-permission-key")
+ )
+ response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id"))
+ response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id"))
+ response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration"))
+ response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state"))
+ response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status"))
+ response_headers["x-ms-mode"] = self._deserialize("str", response.headers.get("x-ms-mode"))
+ response_headers["x-ms-owner"] = self._deserialize("str", response.headers.get("x-ms-owner"))
+ response_headers["x-ms-group"] = self._deserialize("str", response.headers.get("x-ms-group"))
+ response_headers["x-ms-link-count"] = self._deserialize("int", response.headers.get("x-ms-link-count"))
+ response_headers["x-ms-file-file-type"] = self._deserialize("str", response.headers.get("x-ms-file-file-type"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
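Since get_properties deserializes everything into response headers and returns None, a caller that wants those values can pass the optional cls callback used in the code above. A minimal sketch, assuming `client` is an already-constructed async AzureFileStorage client whose URL addresses the target file and whose file operations group is exposed as `client.file` (both names are assumptions, not part of this module):

async def fetch_file_properties(client):
    # The callback receives the pipeline response, the (None) body, and the
    # dict of deserialized headers; returning the headers hands them back
    # to the caller of get_properties.
    def capture_headers(pipeline_response, deserialized, headers):
        return headers

    headers = await client.file.get_properties(cls=capture_headers)
    return headers["Content-Length"], headers.get("x-ms-lease-state")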
+ @distributed_trace_async
+ async def delete(
+ self,
+ timeout: Optional[int] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """removes the file from the storage account.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_delete_request(
+ url=self._config.url,
+ timeout=timeout,
+ lease_id=_lease_id,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-link-count"] = self._deserialize("int", response.headers.get("x-ms-link-count"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace_async
+ async def set_http_headers(
+ self,
+ timeout: Optional[int] = None,
+ file_content_length: Optional[int] = None,
+ file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ file_permission_key: Optional[str] = None,
+ file_attributes: str = "none",
+ file_creation_time: str = "now",
+ file_last_write_time: str = "now",
+ file_change_time: Optional[str] = None,
+ owner: Optional[str] = None,
+ group: Optional[str] = None,
+ file_mode: Optional[str] = None,
+ file_http_headers: Optional[_models.FileHTTPHeaders] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Sets HTTP headers on the file.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param file_content_length: Resizes a file to the specified size. If the specified byte value
+ is less than the current size of the file, then all ranges above the specified byte value are
+ cleared. Default value is None.
+ :type file_content_length: int
+        :param file_permission: If specified, the permission (security descriptor) shall be set for the
+        directory/file. This header can be used if the permission size is <= 8KB; otherwise, the
+        x-ms-file-permission-key header shall be used. Default value: Inherit. If SDDL is specified as
+        input, it must have an owner, group, and dacl. Note: Only one of x-ms-file-permission or
+        x-ms-file-permission-key should be specified. Default value is "inherit".
+ :type file_permission: str
+ :param file_permission_format: Optional. Available for version 2023-06-01 and later. Specifies
+ the format in which the permission is returned. Acceptable values are SDDL or binary. If
+ x-ms-file-permission-format is unspecified or explicitly set to SDDL, the permission is
+ returned in SDDL format. If x-ms-file-permission-format is explicitly set to binary, the
+ permission is returned as a base64 string representing the binary encoding of the permission.
+ Known values are: "Sddl" and "Binary". Default value is None.
+ :type file_permission_format: str or ~azure.storage.fileshare.models.FilePermissionFormat
+ :param file_permission_key: Key of the permission to be set for the directory/file. Note: Only
+ one of the x-ms-file-permission or x-ms-file-permission-key should be specified. Default value
+ is None.
+ :type file_permission_key: str
+ :param file_attributes: If specified, the provided file attributes shall be set. Default value:
+ ‘Archive’ for file and ‘Directory’ for directory. ‘None’ can also be specified as default.
+ Default value is "none".
+ :type file_attributes: str
+ :param file_creation_time: Creation time for the file/directory. Default value: Now. Default
+ value is "now".
+ :type file_creation_time: str
+ :param file_last_write_time: Last write time for the file/directory. Default value: Now.
+ Default value is "now".
+ :type file_last_write_time: str
+ :param file_change_time: Change time for the file/directory. Default value: Now. Default value
+ is None.
+ :type file_change_time: str
+ :param owner: Optional, NFS only. The owner of the file or directory. Default value is None.
+ :type owner: str
+ :param group: Optional, NFS only. The owning group of the file or directory. Default value is
+ None.
+ :type group: str
+ :param file_mode: Optional, NFS only. The file mode of the file or directory. Default value is
+ None.
+ :type file_mode: str
+ :param file_http_headers: Parameter group. Default value is None.
+ :type file_http_headers: ~azure.storage.fileshare.models.FileHTTPHeaders
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _file_content_type = None
+ _file_content_encoding = None
+ _file_content_language = None
+ _file_cache_control = None
+ _file_content_md5 = None
+ _file_content_disposition = None
+ _lease_id = None
+ if file_http_headers is not None:
+ _file_cache_control = file_http_headers.file_cache_control
+ _file_content_disposition = file_http_headers.file_content_disposition
+ _file_content_encoding = file_http_headers.file_content_encoding
+ _file_content_language = file_http_headers.file_content_language
+ _file_content_md5 = file_http_headers.file_content_md5
+ _file_content_type = file_http_headers.file_content_type
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_set_http_headers_request(
+ url=self._config.url,
+ timeout=timeout,
+ file_content_length=file_content_length,
+ file_content_type=_file_content_type,
+ file_content_encoding=_file_content_encoding,
+ file_content_language=_file_content_language,
+ file_cache_control=_file_cache_control,
+ file_content_md5=_file_content_md5,
+ file_content_disposition=_file_content_disposition,
+ file_permission=file_permission,
+ file_permission_format=file_permission_format,
+ file_permission_key=file_permission_key,
+ file_attributes=file_attributes,
+ file_creation_time=file_creation_time,
+ file_last_write_time=file_last_write_time,
+ file_change_time=file_change_time,
+ lease_id=_lease_id,
+ owner=owner,
+ group=group,
+ file_mode=file_mode,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-request-server-encrypted")
+ )
+ response_headers["x-ms-file-permission-key"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-permission-key")
+ )
+ response_headers["x-ms-file-attributes"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-attributes")
+ )
+ response_headers["x-ms-file-creation-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-creation-time")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-file-change-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-change-time")
+ )
+ response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id"))
+ response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id"))
+ response_headers["x-ms-mode"] = self._deserialize("str", response.headers.get("x-ms-mode"))
+ response_headers["x-ms-owner"] = self._deserialize("str", response.headers.get("x-ms-owner"))
+ response_headers["x-ms-group"] = self._deserialize("str", response.headers.get("x-ms-group"))
+ response_headers["x-ms-link-count"] = self._deserialize("int", response.headers.get("x-ms-link-count"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
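A minimal sketch of driving set_http_headers through the FileHTTPHeaders parameter group; `client` (an initialized async AzureFileStorage client for the target file, exposing `client.file`) and the models import path are assumptions rather than part of this module:

from azure.storage.fileshare._generated import models

async def resize_and_set_content_type(client):
    http_headers = models.FileHTTPHeaders(
        file_content_type="application/octet-stream",
        file_cache_control="no-cache",
    )
    # file_content_length resizes the file; ranges beyond the new size are cleared.
    await client.file.set_http_headers(
        file_content_length=1024,
        file_http_headers=http_headers,
    )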
+ @distributed_trace_async
+ async def set_metadata(
+ self,
+ timeout: Optional[int] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Updates user-defined metadata for the specified file.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param metadata: A name-value pair to associate with a file storage object. Default value is
+ None.
+ :type metadata: dict[str, str]
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_set_metadata_request(
+ url=self._config.url,
+ timeout=timeout,
+ metadata=metadata,
+ lease_id=_lease_id,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-request-server-encrypted")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace_async
+ async def acquire_lease(
+ self,
+ timeout: Optional[int] = None,
+ duration: Optional[int] = None,
+ proposed_lease_id: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """[Update] The Lease File operation establishes and manages a lock on a file for write and delete
+ operations.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a
+ lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease
+ duration cannot be changed using renew or change. Default value is None.
+ :type duration: int
+ :param proposed_lease_id: Proposed lease ID, in a GUID string format. The File service returns
+ 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid
+ Constructor (String) for a list of valid GUID string formats. Default value is None.
+ :type proposed_lease_id: str
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_acquire_lease_request(
+ url=self._config.url,
+ timeout=timeout,
+ duration=duration,
+ proposed_lease_id=proposed_lease_id,
+ request_id_parameter=request_id_parameter,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ action=action,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace_async
+ async def release_lease(
+ self, lease_id: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """[Update] The Lease File operation establishes and manages a lock on a file for write and delete
+ operations.
+
+ :param lease_id: Specifies the current lease ID on the resource. Required.
+ :type lease_id: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_release_lease_request(
+ url=self._config.url,
+ lease_id=lease_id,
+ timeout=timeout,
+ request_id_parameter=request_id_parameter,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ action=action,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
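A minimal sketch of the lease lifecycle these methods implement: propose a lease ID, acquire an infinite lease, perform a write under it, then release it. `client` (an initialized async AzureFileStorage client for the file) and the models import path are assumptions:

import uuid

from azure.storage.fileshare._generated import models

async def update_metadata_under_lease(client):
    lease_id = str(uuid.uuid4())
    # duration=-1 requests an infinite lease; finite leases run 15-60 seconds.
    await client.file.acquire_lease(duration=-1, proposed_lease_id=lease_id)
    try:
        conditions = models.LeaseAccessConditions(lease_id=lease_id)
        # Writes against a leased file must present the active lease ID.
        await client.file.set_metadata(
            metadata={"state": "processing"},
            lease_access_conditions=conditions,
        )
    finally:
        await client.file.release_lease(lease_id=lease_id)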
+ @distributed_trace_async
+ async def change_lease(
+ self,
+ lease_id: str,
+ timeout: Optional[int] = None,
+ proposed_lease_id: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """[Update] The Lease File operation establishes and manages a lock on a file for write and delete
+ operations.
+
+ :param lease_id: Specifies the current lease ID on the resource. Required.
+ :type lease_id: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param proposed_lease_id: Proposed lease ID, in a GUID string format. The File service returns
+ 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid
+ Constructor (String) for a list of valid GUID string formats. Default value is None.
+ :type proposed_lease_id: str
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_change_lease_request(
+ url=self._config.url,
+ lease_id=lease_id,
+ timeout=timeout,
+ proposed_lease_id=proposed_lease_id,
+ request_id_parameter=request_id_parameter,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ action=action,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace_async
+ async def break_lease(
+ self,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """[Update] The Lease File operation establishes and manages a lock on a file for write and delete
+ operations.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_break_lease_request(
+ url=self._config.url,
+ timeout=timeout,
+ lease_id=_lease_id,
+ request_id_parameter=request_id_parameter,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ action=action,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace_async
+ async def upload_range(
+ self,
+ range: str,
+ content_length: int,
+ timeout: Optional[int] = None,
+ file_range_write: Union[str, _models.FileRangeWriteType] = "update",
+ content_md5: Optional[bytes] = None,
+ file_last_written_mode: Optional[Union[str, _models.FileLastWrittenMode]] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ optionalbody: Optional[IO[bytes]] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Upload a range of bytes to a file.
+
+ :param range: Specifies the range of bytes to be written. Both the start and end of the range
+ must be specified. For an update operation, the range can be up to 4 MB in size. For a clear
+ operation, the range can be up to the value of the file's full size. The File service accepts
+ only a single byte range for the Range and 'x-ms-range' headers, and the byte range must be
+ specified in the following format: bytes=startByte-endByte. Required.
+ :type range: str
+ :param content_length: Specifies the number of bytes being transmitted in the request body.
+ When the x-ms-write header is set to clear, the value of this header must be set to zero.
+ Required.
+ :type content_length: int
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param file_range_write: Specify one of the following options: - Update: Writes the bytes
+ specified by the request body into the specified range. The Range and Content-Length headers
+ must match to perform the update. - Clear: Clears the specified range and releases the space
+ used in storage for that range. To clear a range, set the Content-Length header to zero, and
+ set the Range header to a value that indicates the range to clear, up to maximum file size.
+ Known values are: "update" and "clear". Default value is "update".
+ :type file_range_write: str or ~azure.storage.fileshare.models.FileRangeWriteType
+ :param content_md5: An MD5 hash of the content. This hash is used to verify the integrity of
+ the data during transport. When the Content-MD5 header is specified, the File service compares
+ the hash of the content that has arrived with the header value that was sent. If the two hashes
+ do not match, the operation will fail with error code 400 (Bad Request). Default value is None.
+ :type content_md5: bytes
+ :param file_last_written_mode: If the file last write time should be preserved or overwritten.
+ Known values are: "Now" and "Preserve". Default value is None.
+ :type file_last_written_mode: str or ~azure.storage.fileshare.models.FileLastWrittenMode
+ :param structured_body_type: Required if the request body is a structured message. Specifies
+ the message schema version and properties. Default value is None.
+ :type structured_body_type: str
+ :param structured_content_length: Required if the request body is a structured message.
+ Specifies the length of the blob/file content inside the message body. Will always be smaller
+ than Content-Length. Default value is None.
+ :type structured_content_length: int
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :param optionalbody: Initial data. Default value is None.
+ :type optionalbody: IO[bytes]
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["range"] = kwargs.pop("comp", _params.pop("comp", "range"))
+ content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+ _content = optionalbody
+
+ _request = build_upload_range_request(
+ url=self._config.url,
+ range=range,
+ content_length=content_length,
+ timeout=timeout,
+ file_range_write=file_range_write,
+ content_md5=content_md5,
+ lease_id=_lease_id,
+ file_last_written_mode=file_last_written_mode,
+ structured_body_type=structured_body_type,
+ structured_content_length=structured_content_length,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ content_type=content_type,
+ version=self._config.version,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-request-server-encrypted")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
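A minimal sketch of the two x-ms-write modes described above: an update that writes 1 KiB, then a clear that releases the same range. `client` is an assumed, already-initialized async AzureFileStorage client for a file at least 1024 bytes long:

import io

async def write_then_clear_first_kib(client):
    payload = b"\x00" * 1024
    # Update: the Range and Content-Length headers must describe the same
    # number of bytes as the body (bytes 0-1023 == 1024 bytes here).
    await client.file.upload_range(
        range="bytes=0-1023",
        content_length=1024,
        optionalbody=io.BytesIO(payload),
    )
    # Clear: Content-Length must be zero and no body is sent.
    await client.file.upload_range(
        range="bytes=0-1023",
        content_length=0,
        file_range_write="clear",
    )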
+ @distributed_trace_async
+ async def upload_range_from_url(
+ self,
+ range: str,
+ copy_source: str,
+ content_length: int,
+ timeout: Optional[int] = None,
+ source_range: Optional[str] = None,
+ source_content_crc64: Optional[bytes] = None,
+ copy_source_authorization: Optional[str] = None,
+ file_last_written_mode: Optional[Union[str, _models.FileLastWrittenMode]] = None,
+ source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Upload a range of bytes to a file where the contents are read from a URL.
+
+ :param range: Writes data to the specified byte range in the file. Required.
+ :type range: str
+ :param copy_source: Specifies the URL of the source file or blob, up to 2 KB in length. To copy
+ a file to another file within the same storage account, you may use Shared Key to authenticate
+ the source file. If you are copying a file from another storage account, or if you are copying
+ a blob from the same storage account or another storage account, then you must authenticate the
+ source file or blob using a shared access signature. If the source is a public blob, no
+ authentication is required to perform the copy operation. A file in a share snapshot can also
+ be specified as a copy source. Required.
+ :type copy_source: str
+ :param content_length: Specifies the number of bytes being transmitted in the request body.
+ When the x-ms-write header is set to clear, the value of this header must be set to zero.
+ Required.
+ :type content_length: int
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param source_range: Bytes of source data in the specified range. Default value is None.
+ :type source_range: str
+ :param source_content_crc64: Specify the crc64 calculated for the range of bytes that must be
+ read from the copy source. Default value is None.
+ :type source_content_crc64: bytes
+ :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid
+ OAuth access token to copy source. Default value is None.
+ :type copy_source_authorization: str
+ :param file_last_written_mode: If the file last write time should be preserved or overwritten.
+ Known values are: "Now" and "Preserve". Default value is None.
+ :type file_last_written_mode: str or ~azure.storage.fileshare.models.FileLastWrittenMode
+ :param source_modified_access_conditions: Parameter group. Default value is None.
+ :type source_modified_access_conditions:
+ ~azure.storage.fileshare.models.SourceModifiedAccessConditions
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["range"] = kwargs.pop("comp", _params.pop("comp", "range"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _source_if_match_crc64 = None
+ _source_if_none_match_crc64 = None
+ _lease_id = None
+ if source_modified_access_conditions is not None:
+ _source_if_match_crc64 = source_modified_access_conditions.source_if_match_crc64
+ _source_if_none_match_crc64 = source_modified_access_conditions.source_if_none_match_crc64
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_upload_range_from_url_request(
+ url=self._config.url,
+ range=range,
+ copy_source=copy_source,
+ content_length=content_length,
+ timeout=timeout,
+ source_range=source_range,
+ source_content_crc64=source_content_crc64,
+ source_if_match_crc64=_source_if_match_crc64,
+ source_if_none_match_crc64=_source_if_none_match_crc64,
+ lease_id=_lease_id,
+ copy_source_authorization=copy_source_authorization,
+ file_last_written_mode=file_last_written_mode,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ allow_source_trailing_dot=self._config.allow_source_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ file_range_write_from_url=self._config.file_range_write_from_url,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-content-crc64"] = self._deserialize(
+ "bytearray", response.headers.get("x-ms-content-crc64")
+ )
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-request-server-encrypted")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
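A minimal sketch of copying a range server-side with upload_range_from_url; `client` (the destination file's async client) and `source_url` (a SAS-authenticated or public source, per the copy_source docstring) are assumptions:

async def copy_first_mib_from_source(client, source_url):
    await client.file.upload_range_from_url(
        range="bytes=0-1048575",         # destination range, 1 MiB
        copy_source=source_url,
        content_length=0,                # no request body is transmitted for URL copies
        source_range="bytes=0-1048575",  # should cover the same number of bytes
    )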
+ @distributed_trace_async
+ async def get_range_list(
+ self,
+ sharesnapshot: Optional[str] = None,
+ prevsharesnapshot: Optional[str] = None,
+ timeout: Optional[int] = None,
+ range: Optional[str] = None,
+ support_rename: Optional[bool] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> _models.ShareFileRangeList:
+ # pylint: disable=line-too-long
+ """Returns the list of valid ranges for a file.
+
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param prevsharesnapshot: The previous snapshot parameter is an opaque DateTime value that,
+ when present, specifies the previous snapshot. Default value is None.
+ :type prevsharesnapshot: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param range: Specifies the range of bytes over which to list ranges, inclusively. Default
+ value is None.
+ :type range: str
+        :param support_rename: This header is allowed only when the PrevShareSnapshot query parameter
+        is set. Determines whether the changed ranges for a file that has been renamed or moved between
+        the target snapshot (or the live file) and the previous snapshot should be listed. If the value
+        is true, the valid changed ranges for the file will be returned. If the value is false, the
+        operation will result in a failure with a 409 (Conflict) response. The default value is false.
+        Default value is None.
+ :type support_rename: bool
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: ShareFileRangeList or the result of cls(response)
+ :rtype: ~azure.storage.fileshare.models.ShareFileRangeList
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["rangelist"] = kwargs.pop("comp", _params.pop("comp", "rangelist"))
+ cls: ClsType[_models.ShareFileRangeList] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_get_range_list_request(
+ url=self._config.url,
+ sharesnapshot=sharesnapshot,
+ prevsharesnapshot=prevsharesnapshot,
+ timeout=timeout,
+ range=range,
+ lease_id=_lease_id,
+ support_rename=support_rename,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["x-ms-content-length"] = self._deserialize("int", response.headers.get("x-ms-content-length"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ deserialized = self._deserialize("ShareFileRangeList", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
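A minimal sketch of diffing ranges against an earlier share snapshot via prevsharesnapshot, as described above; `client` and `snapshot_time` (an existing share snapshot value) are assumptions:

async def ranges_changed_since(client, snapshot_time):
    # Restricting with `range` is optional; omit it to list the whole file.
    range_list = await client.file.get_range_list(
        prevsharesnapshot=snapshot_time,
        range="bytes=0-4194303",
    )
    return range_list  # a ShareFileRangeList model instance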
+ @distributed_trace_async
+ async def start_copy(
+ self,
+ copy_source: str,
+ timeout: Optional[int] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ file_permission_key: Optional[str] = None,
+ owner: Optional[str] = None,
+ group: Optional[str] = None,
+ file_mode: Optional[str] = None,
+ file_mode_copy_mode: Optional[Union[str, _models.ModeCopyMode]] = None,
+ file_owner_copy_mode: Optional[Union[str, _models.OwnerCopyMode]] = None,
+ copy_file_smb_info: Optional[_models.CopyFileSmbInfo] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Copies a blob or file to a destination file within the storage account.
+
+ :param copy_source: Specifies the URL of the source file or blob, up to 2 KB in length. To copy
+ a file to another file within the same storage account, you may use Shared Key to authenticate
+ the source file. If you are copying a file from another storage account, or if you are copying
+ a blob from the same storage account or another storage account, then you must authenticate the
+ source file or blob using a shared access signature. If the source is a public blob, no
+ authentication is required to perform the copy operation. A file in a share snapshot can also
+ be specified as a copy source. Required.
+ :type copy_source: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param metadata: A name-value pair to associate with a file storage object. Default value is
+ None.
+ :type metadata: dict[str, str]
+        :param file_permission: If specified, the permission (security descriptor) shall be set for the
+        directory/file. This header can be used if the permission size is <= 8KB; otherwise, the
+        x-ms-file-permission-key header shall be used. Default value: Inherit. If SDDL is specified as
+        input, it must have an owner, group, and dacl. Note: Only one of x-ms-file-permission or
+        x-ms-file-permission-key should be specified. Default value is "inherit".
+ :type file_permission: str
+ :param file_permission_format: Optional. Available for version 2023-06-01 and later. Specifies
+ the format in which the permission is returned. Acceptable values are SDDL or binary. If
+ x-ms-file-permission-format is unspecified or explicitly set to SDDL, the permission is
+ returned in SDDL format. If x-ms-file-permission-format is explicitly set to binary, the
+ permission is returned as a base64 string representing the binary encoding of the permission.
+ Known values are: "Sddl" and "Binary". Default value is None.
+ :type file_permission_format: str or ~azure.storage.fileshare.models.FilePermissionFormat
+ :param file_permission_key: Key of the permission to be set for the directory/file. Note: Only
+ one of the x-ms-file-permission or x-ms-file-permission-key should be specified. Default value
+ is None.
+ :type file_permission_key: str
+ :param owner: Optional, NFS only. The owner of the file or directory. Default value is None.
+ :type owner: str
+ :param group: Optional, NFS only. The owning group of the file or directory. Default value is
+ None.
+ :type group: str
+ :param file_mode: Optional, NFS only. The file mode of the file or directory. Default value is
+ None.
+ :type file_mode: str
+ :param file_mode_copy_mode: NFS only. Applicable only when the copy source is a File.
+ Determines the copy behavior of the mode bits of the file. source: The mode on the destination
+ file is copied from the source file. override: The mode on the destination file is determined
+ via the x-ms-mode header. Known values are: "source" and "override". Default value is None.
+ :type file_mode_copy_mode: str or ~azure.storage.fileshare.models.ModeCopyMode
+ :param file_owner_copy_mode: NFS only. Determines the copy behavior of the owner user
+ identifier (UID) and group identifier (GID) of the file. source: The owner user identifier
+ (UID) and group identifier (GID) on the destination file is copied from the source file.
+ override: The owner user identifier (UID) and group identifier (GID) on the destination file is
+ determined via the x-ms-owner and x-ms-group headers. Known values are: "source" and
+ "override". Default value is None.
+ :type file_owner_copy_mode: str or ~azure.storage.fileshare.models.OwnerCopyMode
+ :param copy_file_smb_info: Parameter group. Default value is None.
+ :type copy_file_smb_info: ~azure.storage.fileshare.models.CopyFileSmbInfo
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _file_permission_copy_mode = None
+ _ignore_read_only = None
+ _file_attributes = None
+ _file_creation_time = None
+ _file_last_write_time = None
+ _file_change_time = None
+ _set_archive_attribute = None
+ _lease_id = None
+ if copy_file_smb_info is not None:
+ _file_attributes = copy_file_smb_info.file_attributes
+ _file_change_time = copy_file_smb_info.file_change_time
+ _file_creation_time = copy_file_smb_info.file_creation_time
+ _file_last_write_time = copy_file_smb_info.file_last_write_time
+ _file_permission_copy_mode = copy_file_smb_info.file_permission_copy_mode
+ _ignore_read_only = copy_file_smb_info.ignore_read_only
+ _set_archive_attribute = copy_file_smb_info.set_archive_attribute
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_start_copy_request(
+ url=self._config.url,
+ copy_source=copy_source,
+ timeout=timeout,
+ metadata=metadata,
+ file_permission=file_permission,
+ file_permission_format=file_permission_format,
+ file_permission_key=file_permission_key,
+ file_permission_copy_mode=_file_permission_copy_mode,
+ ignore_read_only=_ignore_read_only,
+ file_attributes=_file_attributes,
+ file_creation_time=_file_creation_time,
+ file_last_write_time=_file_last_write_time,
+ file_change_time=_file_change_time,
+ set_archive_attribute=_set_archive_attribute,
+ lease_id=_lease_id,
+ owner=owner,
+ group=group,
+ file_mode=file_mode,
+ file_mode_copy_mode=file_mode_copy_mode,
+ file_owner_copy_mode=file_owner_copy_mode,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ allow_source_trailing_dot=self._config.allow_source_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id"))
+ response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
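+
+    # Illustrative usage sketch (comment only, not part of the generated client).
+    # ``client`` is assumed to be an already-constructed async ``AzureFileStorage``
+    # instance whose URL points at the destination file, with this operations group
+    # assumed to be exposed as ``client.file`` (mirroring the ``service``/``share``
+    # attributes documented elsewhere in this package). The ``cls`` callback surfaces
+    # the raw response headers, which carry the copy identifier and status:
+    #
+    #     headers = await client.file.start_copy(
+    #         copy_source="https://account.file.core.windows.net/share/src.txt",
+    #         metadata={"origin": "docs-example"},
+    #         cls=lambda pipeline_response, deserialized, hdrs: hdrs,
+    #     )
+    #     copy_id = headers["x-ms-copy-id"]          # needed later to abort the copy
+    #     copy_status = headers["x-ms-copy-status"]  # e.g. "pending" or "success"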
+
+ @distributed_trace_async
+ async def abort_copy(
+ self,
+ copy_id: str,
+ timeout: Optional[int] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Aborts a pending Copy File operation, and leaves a destination file with zero length and full
+ metadata.
+
+ :param copy_id: The copy identifier provided in the x-ms-copy-id header of the original Copy
+ File operation. Required.
+ :type copy_id: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["copy"] = kwargs.pop("comp", _params.pop("comp", "copy"))
+ copy_action_abort_constant: Literal["abort"] = kwargs.pop(
+ "copy_action_abort_constant", _headers.pop("x-ms-copy-action", "abort")
+ )
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_abort_copy_request(
+ url=self._config.url,
+ copy_id=copy_id,
+ timeout=timeout,
+ lease_id=_lease_id,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ copy_action_abort_constant=copy_action_abort_constant,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
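+
+    # Illustrative sketch (comment only): aborting a copy started earlier. The
+    # ``copy_id`` value is assumed to come from the x-ms-copy-id header returned by
+    # ``start_copy``; ``client`` is a placeholder for an async AzureFileStorage
+    # instance exposing this operations group as ``client.file``:
+    #
+    #     await client.file.abort_copy(copy_id=copy_id)
+    #     # Only a pending copy can be aborted; the destination is left as a
+    #     # zero-length file with full metadata, per the docstring above.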
+
+ @distributed_trace_async
+ async def list_handles(
+ self,
+ marker: Optional[str] = None,
+ maxresults: Optional[int] = None,
+ timeout: Optional[int] = None,
+ sharesnapshot: Optional[str] = None,
+ **kwargs: Any
+ ) -> _models.ListHandlesResponse:
+ # pylint: disable=line-too-long
+        """Lists handles for a file.
+
+ :param marker: A string value that identifies the portion of the list to be returned with the
+ next list operation. The operation returns a marker value within the response body if the list
+ returned was not complete. The marker value may then be used in a subsequent call to request
+ the next set of list items. The marker value is opaque to the client. Default value is None.
+ :type marker: str
+ :param maxresults: Specifies the maximum number of entries to return. If the request does not
+ specify maxresults, or specifies a value greater than 5,000, the server will return up to 5,000
+ items. Default value is None.
+ :type maxresults: int
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :return: ListHandlesResponse or the result of cls(response)
+ :rtype: ~azure.storage.fileshare.models.ListHandlesResponse
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["listhandles"] = kwargs.pop("comp", _params.pop("comp", "listhandles"))
+ cls: ClsType[_models.ListHandlesResponse] = kwargs.pop("cls", None)
+
+ _request = build_list_handles_request(
+ url=self._config.url,
+ marker=marker,
+ maxresults=maxresults,
+ timeout=timeout,
+ sharesnapshot=sharesnapshot,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ deserialized = self._deserialize("ListHandlesResponse", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
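+
+    # Illustrative sketch (comment only): listing open handles page by page using
+    # the opaque marker described above. ``client`` is a placeholder, and the
+    # ``handle_list``/``next_marker`` attribute names on the deserialized
+    # ListHandlesResponse are assumed from the generated models, not shown here:
+    #
+    #     marker = None
+    #     while True:
+    #         page = await client.file.list_handles(marker=marker, maxresults=100)
+    #         open_handles = page.handle_list or []
+    #         marker = page.next_marker
+    #         if not marker:
+    #             break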
+
+ @distributed_trace_async
+ async def force_close_handles(
+ self,
+ handle_id: str,
+ timeout: Optional[int] = None,
+ marker: Optional[str] = None,
+ sharesnapshot: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+        """Closes all handles open for the given file.
+
+ :param handle_id: Specifies handle ID opened on the file or directory to be closed. Asterisk
+ (‘*’) is a wildcard that specifies all handles. Required.
+ :type handle_id: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param marker: A string value that identifies the portion of the list to be returned with the
+ next list operation. The operation returns a marker value within the response body if the list
+ returned was not complete. The marker value may then be used in a subsequent call to request
+ the next set of list items. The marker value is opaque to the client. Default value is None.
+ :type marker: str
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["forceclosehandles"] = kwargs.pop("comp", _params.pop("comp", "forceclosehandles"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_force_close_handles_request(
+ url=self._config.url,
+ handle_id=handle_id,
+ timeout=timeout,
+ marker=marker,
+ sharesnapshot=sharesnapshot,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-marker"] = self._deserialize("str", response.headers.get("x-ms-marker"))
+ response_headers["x-ms-number-of-handles-closed"] = self._deserialize(
+ "int", response.headers.get("x-ms-number-of-handles-closed")
+ )
+ response_headers["x-ms-number-of-handles-failed"] = self._deserialize(
+ "int", response.headers.get("x-ms-number-of-handles-failed")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
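+
+    # Illustrative sketch (comment only): closing every handle on the file. The
+    # wildcard "*" is taken from the docstring above; continuation uses the
+    # x-ms-marker response header, surfaced here via the ``cls`` callback
+    # (``client`` is a placeholder for an async AzureFileStorage instance):
+    #
+    #     marker = None
+    #     closed = 0
+    #     while True:
+    #         hdrs = await client.file.force_close_handles(
+    #             handle_id="*",
+    #             marker=marker,
+    #             cls=lambda pipeline_response, deserialized, h: h,
+    #         )
+    #         closed += hdrs["x-ms-number-of-handles-closed"]
+    #         marker = hdrs.get("x-ms-marker")
+    #         if not marker:
+    #             break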
+
+ @distributed_trace_async
+ async def rename(
+ self,
+ rename_source: str,
+ timeout: Optional[int] = None,
+ replace_if_exists: Optional[bool] = None,
+ ignore_read_only: Optional[bool] = None,
+ file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ file_permission_key: Optional[str] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ source_lease_access_conditions: Optional[_models.SourceLeaseAccessConditions] = None,
+ destination_lease_access_conditions: Optional[_models.DestinationLeaseAccessConditions] = None,
+ copy_file_smb_info: Optional[_models.CopyFileSmbInfo] = None,
+ file_http_headers: Optional[_models.FileHTTPHeaders] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Renames a file.
+
+        :param rename_source: Specifies the URI-style path of the source file, up to 2 KB in
+         length. Required.
+ :type rename_source: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+        :param replace_if_exists: Optional. A boolean value that specifies whether to overwrite the
+         destination file if it already exists. If true, the rename will succeed and will overwrite
+         the destination file. If not provided, or if false and the destination file does exist, the
+         request will not overwrite the destination file. If provided and the destination file
+         doesn't exist, the rename will succeed. Note: This value does not override the
+         x-ms-file-copy-ignore-read-only header value. Default value is None.
+ :type replace_if_exists: bool
+        :param ignore_read_only: Optional. A boolean value that specifies whether the ReadOnly
+         attribute on a preexisting destination file should be respected. If true, the rename will
+         succeed; otherwise, an existing file at the destination with the ReadOnly attribute set
+         will cause the rename to fail. Default value is None.
+ :type ignore_read_only: bool
+        :param file_permission: If specified, the permission (security descriptor) will be set for
+         the directory/file. This header can be used if the permission size is <= 8KB; otherwise,
+         the x-ms-file-permission-key header must be used. Default value: Inherit. If SDDL is
+         specified as input, it must have owner, group and dacl. Note: Only one of
+         x-ms-file-permission or x-ms-file-permission-key should be specified. Default value is "inherit".
+ :type file_permission: str
+ :param file_permission_format: Optional. Available for version 2023-06-01 and later. Specifies
+ the format in which the permission is returned. Acceptable values are SDDL or binary. If
+ x-ms-file-permission-format is unspecified or explicitly set to SDDL, the permission is
+ returned in SDDL format. If x-ms-file-permission-format is explicitly set to binary, the
+ permission is returned as a base64 string representing the binary encoding of the permission.
+ Known values are: "Sddl" and "Binary". Default value is None.
+ :type file_permission_format: str or ~azure.storage.fileshare.models.FilePermissionFormat
+ :param file_permission_key: Key of the permission to be set for the directory/file. Note: Only
+ one of the x-ms-file-permission or x-ms-file-permission-key should be specified. Default value
+ is None.
+ :type file_permission_key: str
+ :param metadata: A name-value pair to associate with a file storage object. Default value is
+ None.
+ :type metadata: dict[str, str]
+ :param source_lease_access_conditions: Parameter group. Default value is None.
+ :type source_lease_access_conditions:
+ ~azure.storage.fileshare.models.SourceLeaseAccessConditions
+ :param destination_lease_access_conditions: Parameter group. Default value is None.
+ :type destination_lease_access_conditions:
+ ~azure.storage.fileshare.models.DestinationLeaseAccessConditions
+ :param copy_file_smb_info: Parameter group. Default value is None.
+ :type copy_file_smb_info: ~azure.storage.fileshare.models.CopyFileSmbInfo
+ :param file_http_headers: Parameter group. Default value is None.
+ :type file_http_headers: ~azure.storage.fileshare.models.FileHTTPHeaders
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["rename"] = kwargs.pop("comp", _params.pop("comp", "rename"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _source_lease_id = None
+ _destination_lease_id = None
+ _file_attributes = None
+ _file_creation_time = None
+ _file_last_write_time = None
+ _file_change_time = None
+ _file_content_type = None
+ if source_lease_access_conditions is not None:
+ _source_lease_id = source_lease_access_conditions.source_lease_id
+ if destination_lease_access_conditions is not None:
+ _destination_lease_id = destination_lease_access_conditions.destination_lease_id
+ if copy_file_smb_info is not None:
+ _file_attributes = copy_file_smb_info.file_attributes
+ _file_change_time = copy_file_smb_info.file_change_time
+ _file_creation_time = copy_file_smb_info.file_creation_time
+ _file_last_write_time = copy_file_smb_info.file_last_write_time
+ if file_http_headers is not None:
+ _file_content_type = file_http_headers.file_content_type
+
+ _request = build_rename_request(
+ url=self._config.url,
+ rename_source=rename_source,
+ timeout=timeout,
+ replace_if_exists=replace_if_exists,
+ ignore_read_only=ignore_read_only,
+ source_lease_id=_source_lease_id,
+ destination_lease_id=_destination_lease_id,
+ file_attributes=_file_attributes,
+ file_creation_time=_file_creation_time,
+ file_last_write_time=_file_last_write_time,
+ file_change_time=_file_change_time,
+ file_permission=file_permission,
+ file_permission_format=file_permission_format,
+ file_permission_key=file_permission_key,
+ metadata=metadata,
+ file_content_type=_file_content_type,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ allow_source_trailing_dot=self._config.allow_source_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-request-server-encrypted")
+ )
+ response_headers["x-ms-file-permission-key"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-permission-key")
+ )
+ response_headers["x-ms-file-attributes"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-attributes")
+ )
+ response_headers["x-ms-file-creation-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-creation-time")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-file-change-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-change-time")
+ )
+ response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id"))
+ response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
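+
+    # Illustrative sketch (comment only): the rename request is sent to the
+    # destination path (the URL this operations group was configured with, per
+    # ``url=self._config.url`` above), while ``rename_source`` identifies the
+    # existing source file. ``client`` and the URL below are placeholders:
+    #
+    #     await client.file.rename(
+    #         rename_source="https://account.file.core.windows.net/share/dir/old.txt",
+    #         replace_if_exists=True,   # overwrite the destination if it already exists
+    #     )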
+
+ @distributed_trace_async
+ async def create_symbolic_link(
+ self,
+ link_text: str,
+ timeout: Optional[int] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ file_creation_time: str = "now",
+ file_last_write_time: str = "now",
+ request_id_parameter: Optional[str] = None,
+ owner: Optional[str] = None,
+ group: Optional[str] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Creates a symbolic link.
+
+        :param link_text: NFS only. The path to the original file that the symbolic link points to.
+         The path is a string that is not resolved and is stored as is. The path can be absolute or
+         relative, depending on the content stored in the symbolic link file. Required.
+ :type link_text: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param metadata: A name-value pair to associate with a file storage object. Default value is
+ None.
+ :type metadata: dict[str, str]
+ :param file_creation_time: Creation time for the file/directory. Default value: Now. Default
+ value is "now".
+ :type file_creation_time: str
+ :param file_last_write_time: Last write time for the file/directory. Default value: Now.
+ Default value is "now".
+ :type file_last_write_time: str
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :param owner: Optional, NFS only. The owner of the file or directory. Default value is None.
+ :type owner: str
+ :param group: Optional, NFS only. The owning group of the file or directory. Default value is
+ None.
+ :type group: str
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["symboliclink"] = kwargs.pop("restype", _params.pop("restype", "symboliclink"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_create_symbolic_link_request(
+ url=self._config.url,
+ link_text=link_text,
+ timeout=timeout,
+ metadata=metadata,
+ file_creation_time=file_creation_time,
+ file_last_write_time=file_last_write_time,
+ request_id_parameter=request_id_parameter,
+ lease_id=_lease_id,
+ owner=owner,
+ group=group,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-file-creation-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-creation-time")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-file-change-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-change-time")
+ )
+ response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id"))
+ response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-mode"] = self._deserialize("str", response.headers.get("x-ms-mode"))
+ response_headers["x-ms-owner"] = self._deserialize("str", response.headers.get("x-ms-owner"))
+ response_headers["x-ms-group"] = self._deserialize("str", response.headers.get("x-ms-group"))
+ response_headers["x-ms-file-file-type"] = self._deserialize("str", response.headers.get("x-ms-file-file-type"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
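+
+    # Illustrative sketch (comment only, NFS shares): ``link_text`` is stored
+    # verbatim and may be relative or absolute, per the docstring above. ``client``
+    # is a placeholder, and the UID/GID strings are example values only:
+    #
+    #     await client.file.create_symbolic_link(
+    #         link_text="../shared/config.yaml",
+    #         owner="1000",
+    #         group="1000",
+    #     )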
+
+ @distributed_trace_async
+ async def get_symbolic_link(
+ self,
+ timeout: Optional[int] = None,
+ sharesnapshot: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+        """Reads the target of a symbolic link. The link text is returned in the x-ms-link-text
+        response header.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["symboliclink"] = kwargs.pop("restype", _params.pop("restype", "symboliclink"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_get_symbolic_link_request(
+ url=self._config.url,
+ timeout=timeout,
+ sharesnapshot=sharesnapshot,
+ request_id_parameter=request_id_parameter,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-link-text"] = self._deserialize("str", response.headers.get("x-ms-link-text"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
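+
+    # Illustrative sketch (comment only): the operation returns no body, so the
+    # link target is read from the x-ms-link-text response header via the ``cls``
+    # callback (``client`` is a placeholder):
+    #
+    #     hdrs = await client.file.get_symbolic_link(
+    #         cls=lambda pipeline_response, deserialized, h: h,
+    #     )
+    #     link_target = hdrs["x-ms-link-text"]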
+
+ @distributed_trace_async
+ async def create_hard_link(
+ self,
+ target_file: str,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Creates a hard link.
+
+        :param target_file: NFS only. Specifies the path of the target file to which the link will
+         be created, up to 2 KiB in length. It should be the full path of the target from the root.
+         The target file must be in the same share and hence in the same storage account. Required.
+ :type target_file: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["hardlink"] = kwargs.pop("restype", _params.pop("restype", "hardlink"))
+ file_type_constant: Literal["file"] = kwargs.pop("file_type_constant", _headers.pop("x-ms-type", "file"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_create_hard_link_request(
+ url=self._config.url,
+ target_file=target_file,
+ timeout=timeout,
+ request_id_parameter=request_id_parameter,
+ lease_id=_lease_id,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ file_type_constant=file_type_constant,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-file-creation-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-creation-time")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-file-change-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-change-time")
+ )
+ response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id"))
+ response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-link-count"] = self._deserialize("int", response.headers.get("x-ms-link-count"))
+ response_headers["x-ms-mode"] = self._deserialize("str", response.headers.get("x-ms-mode"))
+ response_headers["x-ms-owner"] = self._deserialize("str", response.headers.get("x-ms-owner"))
+ response_headers["x-ms-group"] = self._deserialize("str", response.headers.get("x-ms-group"))
+ response_headers["x-ms-file-file-type"] = self._deserialize("str", response.headers.get("x-ms-file-file-type"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
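+
+    # Illustrative sketch (comment only, NFS shares): ``target_file`` is the full
+    # path of the existing file from the root, and the target must live in the same
+    # share, per the docstring above. ``client`` and the path are placeholders:
+    #
+    #     await client.file.create_hard_link(
+    #         target_file="/myshare/data/original.bin",
+    #     )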
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/_patch.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/_patch.py
new file mode 100644
index 00000000..f7dd3251
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/_patch.py
@@ -0,0 +1,20 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+from typing import List
+
+__all__: List[str] = [] # Add all objects you want publicly available to users at this package level
+
+
+def patch_sdk():
+ """Do not remove from this file.
+
+ `patch_sdk` is a last resort escape hatch that allows you to do customizations
+ you can't accomplish using the techniques described in
+ https://aka.ms/azsdk/python/dpcodegen/python/customize
+ """
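+
+# Illustrative sketch (comment only) of the customization pattern this module
+# supports: define or subclass an object here, then list it in ``__all__`` so it
+# becomes publicly available at this package level (per the comment on ``__all__``
+# above). The module and class names below are assumptions, not generated code:
+#
+#     from ._file_operations import FileOperations
+#
+#     class PatchedFileOperations(FileOperations):
+#         """Hypothetical subclass adding project-specific behaviour."""
+#
+#     __all__ = ["PatchedFileOperations"]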
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/_service_operations.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/_service_operations.py
new file mode 100644
index 00000000..4814e221
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/_service_operations.py
@@ -0,0 +1,284 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import sys
+from typing import Any, Callable, Dict, List, Literal, Optional, TypeVar, Union
+
+from azure.core import AsyncPipelineClient
+from azure.core.exceptions import (
+ ClientAuthenticationError,
+ HttpResponseError,
+ ResourceExistsError,
+ ResourceNotFoundError,
+ ResourceNotModifiedError,
+ map_error,
+)
+from azure.core.pipeline import PipelineResponse
+from azure.core.rest import AsyncHttpResponse, HttpRequest
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.core.utils import case_insensitive_dict
+
+from ... import models as _models
+from ..._serialization import Deserializer, Serializer
+from ...operations._service_operations import (
+ build_get_properties_request,
+ build_list_shares_segment_request,
+ build_set_properties_request,
+)
+from .._configuration import AzureFileStorageConfiguration
+
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+
+class ServiceOperations:
+ """
+ .. warning::
+ **DO NOT** instantiate this class directly.
+
+ Instead, you should access the following operations through
+ :class:`~azure.storage.fileshare.aio.AzureFileStorage`'s
+ :attr:`service` attribute.
+ """
+
+ models = _models
+
+ def __init__(self, *args, **kwargs) -> None:
+ input_args = list(args)
+ self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
+ self._config: AzureFileStorageConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
+ self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
+ self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+ @distributed_trace_async
+ async def set_properties(
+ self, storage_service_properties: _models.StorageServiceProperties, timeout: Optional[int] = None, **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Sets properties for a storage account's File service endpoint, including properties for Storage
+ Analytics metrics and CORS (Cross-Origin Resource Sharing) rules.
+
+ :param storage_service_properties: The StorageService properties. Required.
+ :type storage_service_properties: ~azure.storage.fileshare.models.StorageServiceProperties
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service"))
+ comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
+ content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _content = self._serialize.body(storage_service_properties, "StorageServiceProperties", is_xml=True)
+
+ _request = build_set_properties_request(
+ url=self._config.url,
+ timeout=timeout,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ content_type=content_type,
+ version=self._config.version,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace_async
+ async def get_properties(self, timeout: Optional[int] = None, **kwargs: Any) -> _models.StorageServiceProperties:
+ # pylint: disable=line-too-long
+ """Gets the properties of a storage account's File service, including properties for Storage
+ Analytics metrics and CORS (Cross-Origin Resource Sharing) rules.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :return: StorageServiceProperties or the result of cls(response)
+ :rtype: ~azure.storage.fileshare.models.StorageServiceProperties
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service"))
+ comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
+ cls: ClsType[_models.StorageServiceProperties] = kwargs.pop("cls", None)
+
+ _request = build_get_properties_request(
+ url=self._config.url,
+ timeout=timeout,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+
+ deserialized = self._deserialize("StorageServiceProperties", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
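+
+    # Illustrative sketch (comment only): a read-modify-write round trip through
+    # ``get_properties`` and ``set_properties`` above. ``client`` is a placeholder
+    # for an async AzureFileStorage instance (this group is its ``service``
+    # attribute, per the class docstring):
+    #
+    #     props = await client.service.get_properties()
+    #     # ...modify fields as needed (attribute names such as ``hour_metrics`` or
+    #     # ``cors`` are assumed from the generated models, not shown in this file)...
+    #     await client.service.set_properties(props)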
+
+ @distributed_trace_async
+ async def list_shares_segment(
+ self,
+ prefix: Optional[str] = None,
+ marker: Optional[str] = None,
+ maxresults: Optional[int] = None,
+ include: Optional[List[Union[str, _models.ListSharesIncludeType]]] = None,
+ timeout: Optional[int] = None,
+ **kwargs: Any
+ ) -> _models.ListSharesResponse:
+ # pylint: disable=line-too-long
+ """The List Shares Segment operation returns a list of the shares and share snapshots under the
+ specified account.
+
+ :param prefix: Filters the results to return only entries whose name begins with the specified
+ prefix. Default value is None.
+ :type prefix: str
+ :param marker: A string value that identifies the portion of the list to be returned with the
+ next list operation. The operation returns a marker value within the response body if the list
+ returned was not complete. The marker value may then be used in a subsequent call to request
+ the next set of list items. The marker value is opaque to the client. Default value is None.
+ :type marker: str
+ :param maxresults: Specifies the maximum number of entries to return. If the request does not
+ specify maxresults, or specifies a value greater than 5,000, the server will return up to 5,000
+ items. Default value is None.
+ :type maxresults: int
+ :param include: Include this parameter to specify one or more datasets to include in the
+ response. Default value is None.
+ :type include: list[str or ~azure.storage.fileshare.models.ListSharesIncludeType]
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :return: ListSharesResponse or the result of cls(response)
+ :rtype: ~azure.storage.fileshare.models.ListSharesResponse
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list"))
+ cls: ClsType[_models.ListSharesResponse] = kwargs.pop("cls", None)
+
+ _request = build_list_shares_segment_request(
+ url=self._config.url,
+ prefix=prefix,
+ marker=marker,
+ maxresults=maxresults,
+ include=include,
+ timeout=timeout,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+
+ deserialized = self._deserialize("ListSharesResponse", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
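+
+    # Illustrative sketch (comment only): manual paging over shares using the opaque
+    # marker described above. ``client`` is a placeholder; the ``share_items`` and
+    # ``next_marker`` attribute names on ListSharesResponse are assumed from the
+    # generated models:
+    #
+    #     marker = None
+    #     while True:
+    #         page = await client.service.list_shares_segment(
+    #             prefix="logs-", marker=marker, maxresults=100, include=["snapshots"]
+    #         )
+    #         for share in page.share_items or []:
+    #             print(share.name)
+    #         marker = page.next_marker
+    #         if not marker:
+    #             break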
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/_share_operations.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/_share_operations.py
new file mode 100644
index 00000000..373424ef
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/aio/operations/_share_operations.py
@@ -0,0 +1,1765 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from io import IOBase
+import sys
+from typing import Any, Callable, Dict, IO, List, Literal, Optional, TypeVar, Union, overload
+
+from azure.core import AsyncPipelineClient
+from azure.core.exceptions import (
+ ClientAuthenticationError,
+ HttpResponseError,
+ ResourceExistsError,
+ ResourceNotFoundError,
+ ResourceNotModifiedError,
+ map_error,
+)
+from azure.core.pipeline import PipelineResponse
+from azure.core.rest import AsyncHttpResponse, HttpRequest
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.core.utils import case_insensitive_dict
+
+from ... import models as _models
+from ..._serialization import Deserializer, Serializer
+from ...operations._share_operations import (
+ build_acquire_lease_request,
+ build_break_lease_request,
+ build_change_lease_request,
+ build_create_permission_request,
+ build_create_request,
+ build_create_snapshot_request,
+ build_delete_request,
+ build_get_access_policy_request,
+ build_get_permission_request,
+ build_get_properties_request,
+ build_get_statistics_request,
+ build_release_lease_request,
+ build_renew_lease_request,
+ build_restore_request,
+ build_set_access_policy_request,
+ build_set_metadata_request,
+ build_set_properties_request,
+)
+from .._configuration import AzureFileStorageConfiguration
+
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+
+class ShareOperations:
+ """
+ .. warning::
+ **DO NOT** instantiate this class directly.
+
+ Instead, you should access the following operations through
+ :class:`~azure.storage.fileshare.aio.AzureFileStorage`'s
+ :attr:`share` attribute.
+ """
+
+ models = _models
+
+ def __init__(self, *args, **kwargs) -> None:
+ input_args = list(args)
+ self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
+ self._config: AzureFileStorageConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
+ self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
+ self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+ @distributed_trace_async
+ async def create(
+ self,
+ timeout: Optional[int] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ quota: Optional[int] = None,
+ access_tier: Optional[Union[str, _models.ShareAccessTier]] = None,
+ enabled_protocols: Optional[str] = None,
+ root_squash: Optional[Union[str, _models.ShareRootSquash]] = None,
+ enable_snapshot_virtual_directory_access: Optional[bool] = None,
+ paid_bursting_enabled: Optional[bool] = None,
+ paid_bursting_max_bandwidth_mibps: Optional[int] = None,
+ paid_bursting_max_iops: Optional[int] = None,
+ share_provisioned_iops: Optional[int] = None,
+ share_provisioned_bandwidth_mibps: Optional[int] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Creates a new share under the specified account. If the share with the same name already
+ exists, the operation fails.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param metadata: A name-value pair to associate with a file storage object. Default value is
+ None.
+ :type metadata: dict[str, str]
+ :param quota: Specifies the maximum size of the share, in gigabytes. Default value is None.
+ :type quota: int
+ :param access_tier: Specifies the access tier of the share. Known values are:
+ "TransactionOptimized", "Hot", "Cool", and "Premium". Default value is None.
+ :type access_tier: str or ~azure.storage.fileshare.models.ShareAccessTier
+ :param enabled_protocols: Protocols to enable on the share. Default value is None.
+ :type enabled_protocols: str
+ :param root_squash: Root squash to set on the share. Only valid for NFS shares. Known values
+ are: "NoRootSquash", "RootSquash", and "AllSquash". Default value is None.
+ :type root_squash: str or ~azure.storage.fileshare.models.ShareRootSquash
+ :param enable_snapshot_virtual_directory_access: Default value is None.
+ :type enable_snapshot_virtual_directory_access: bool
+ :param paid_bursting_enabled: Optional. Boolean. Default if not specified is false. This
+ property enables paid bursting. Default value is None.
+ :type paid_bursting_enabled: bool
+ :param paid_bursting_max_bandwidth_mibps: Optional. Integer. Default if not specified is the
+ maximum throughput the file share can support. Current maximum for a file share is 10,340
+ MiB/sec. Default value is None.
+ :type paid_bursting_max_bandwidth_mibps: int
+ :param paid_bursting_max_iops: Optional. Integer. Default if not specified is the maximum IOPS
+ the file share can support. Current maximum for a file share is 102,400 IOPS. Default value is
+ None.
+ :type paid_bursting_max_iops: int
+        :param share_provisioned_iops: Optional. Supported in version 2025-01-05 and later. Only
+         allowed for provisioned v2 file shares. Specifies the provisioned number of input/output
+         operations per second (IOPS) of the share. If this is not specified, the provisioned IOPS
+         is set to a value calculated based on the recommendation formula. Default value is None.
+        :type share_provisioned_iops: int
+        :param share_provisioned_bandwidth_mibps: Optional. Supported in version 2025-01-05 and
+         later. Only allowed for provisioned v2 file shares. Specifies the provisioned bandwidth of
+         the share, in mebibytes per second (MiBps). If this is not specified, the provisioned
+         bandwidth is set to a value calculated based on the recommendation formula. Default value
+         is None.
+        :type share_provisioned_bandwidth_mibps: int
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_create_request(
+ url=self._config.url,
+ timeout=timeout,
+ metadata=metadata,
+ quota=quota,
+ access_tier=access_tier,
+ enabled_protocols=enabled_protocols,
+ root_squash=root_squash,
+ enable_snapshot_virtual_directory_access=enable_snapshot_virtual_directory_access,
+ paid_bursting_enabled=paid_bursting_enabled,
+ paid_bursting_max_bandwidth_mibps=paid_bursting_max_bandwidth_mibps,
+ paid_bursting_max_iops=paid_bursting_max_iops,
+ share_provisioned_iops=share_provisioned_iops,
+ share_provisioned_bandwidth_mibps=share_provisioned_bandwidth_mibps,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-share-quota"] = self._deserialize("int", response.headers.get("x-ms-share-quota"))
+ response_headers["x-ms-share-provisioned-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-iops")
+ )
+ response_headers["x-ms-share-provisioned-bandwidth-mibps"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-bandwidth-mibps")
+ )
+ response_headers["x-ms-share-included-burst-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-included-burst-iops")
+ )
+ response_headers["x-ms-share-max-burst-credits-for-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-max-burst-credits-for-iops")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
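+
+ # --- Editorial note (hedged): a minimal, commented usage sketch for the create() operation
+ # above. It assumes the public async ShareClient wrapper from azure.storage.fileshare.aio,
+ # which delegates to this generated call; the connection string and share name are placeholders.
+ #
+ #   import asyncio
+ #   from azure.storage.fileshare.aio import ShareClient
+ #
+ #   async def create_example(conn_str: str) -> None:
+ #       async with ShareClient.from_connection_string(conn_str, share_name="myshare") as share:
+ #           # Corresponds to ShareOperations.create with metadata and a quota in GiB.
+ #           await share.create_share(metadata={"env": "dev"}, quota=100)
+ #
+ #   asyncio.run(create_example("<connection-string>"))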
+
+ @distributed_trace_async
+ async def get_properties(
+ self,
+ sharesnapshot: Optional[str] = None,
+ timeout: Optional[int] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Returns all user-defined metadata and system properties for the specified share or share
+ snapshot. The data returned does not include the share's list of files.
+
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_get_properties_request(
+ url=self._config.url,
+ sharesnapshot=sharesnapshot,
+ timeout=timeout,
+ lease_id=_lease_id,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta"))
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-share-quota"] = self._deserialize("int", response.headers.get("x-ms-share-quota"))
+ response_headers["x-ms-share-provisioned-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-iops")
+ )
+ response_headers["x-ms-share-provisioned-ingress-mbps"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-ingress-mbps")
+ )
+ response_headers["x-ms-share-provisioned-egress-mbps"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-egress-mbps")
+ )
+ response_headers["x-ms-share-next-allowed-quota-downgrade-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-share-next-allowed-quota-downgrade-time")
+ )
+ response_headers["x-ms-share-provisioned-bandwidth-mibps"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-bandwidth-mibps")
+ )
+ response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration"))
+ response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state"))
+ response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status"))
+ response_headers["x-ms-access-tier"] = self._deserialize("str", response.headers.get("x-ms-access-tier"))
+ response_headers["x-ms-access-tier-change-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-access-tier-change-time")
+ )
+ response_headers["x-ms-access-tier-transition-state"] = self._deserialize(
+ "str", response.headers.get("x-ms-access-tier-transition-state")
+ )
+ response_headers["x-ms-enabled-protocols"] = self._deserialize(
+ "str", response.headers.get("x-ms-enabled-protocols")
+ )
+ response_headers["x-ms-root-squash"] = self._deserialize("str", response.headers.get("x-ms-root-squash"))
+ response_headers["x-ms-enable-snapshot-virtual-directory-access"] = self._deserialize(
+ "bool", response.headers.get("x-ms-enable-snapshot-virtual-directory-access")
+ )
+ response_headers["x-ms-share-paid-bursting-enabled"] = self._deserialize(
+ "bool", response.headers.get("x-ms-share-paid-bursting-enabled")
+ )
+ response_headers["x-ms-share-paid-bursting-max-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-paid-bursting-max-iops")
+ )
+ response_headers["x-ms-share-paid-bursting-max-bandwidth-mibps"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-paid-bursting-max-bandwidth-mibps")
+ )
+ response_headers["x-ms-share-included-burst-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-included-burst-iops")
+ )
+ response_headers["x-ms-share-max-burst-credits-for-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-max-burst-credits-for-iops")
+ )
+ response_headers["x-ms-share-next-allowed-provisioned-iops-downgrade-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-share-next-allowed-provisioned-iops-downgrade-time")
+ )
+ response_headers["x-ms-share-next-allowed-provisioned-bandwidth-downgrade-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-share-next-allowed-provisioned-bandwidth-downgrade-time")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
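+
+ # --- Editorial note (hedged): sketch for get_properties(), assuming the public async
+ # ShareClient wrapper; the response headers deserialized above surface as attributes of
+ # the returned ShareProperties object.
+ #
+ #   async def properties_example(share: "ShareClient") -> None:
+ #       props = await share.get_share_properties()
+ #       print(props.quota, props.last_modified)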
+
+ @distributed_trace_async
+ async def delete(
+ self,
+ sharesnapshot: Optional[str] = None,
+ timeout: Optional[int] = None,
+ delete_snapshots: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Operation marks the specified share or share snapshot for deletion. The share or share snapshot
+ and any files contained within it are later deleted during garbage collection.
+
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param delete_snapshots: Specifies the option to delete the base share together with all of
+ its snapshots. Known values are: "include" and "include-leased". Default value is None.
+ :type delete_snapshots: str or ~azure.storage.fileshare.models.DeleteSnapshotsOptionType
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_delete_request(
+ url=self._config.url,
+ sharesnapshot=sharesnapshot,
+ timeout=timeout,
+ delete_snapshots=delete_snapshots,
+ lease_id=_lease_id,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-file-share-usage-bytes"] = self._deserialize(
+ "int", response.headers.get("x-ms-file-share-usage-bytes")
+ )
+ response_headers["x-ms-file-share-snapshot-usage-bytes"] = self._deserialize(
+ "int", response.headers.get("x-ms-file-share-snapshot-usage-bytes")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
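+
+ # --- Editorial note (hedged): sketch for delete(), via the assumed public async wrapper.
+ # delete_snapshots=True corresponds to the "include" option documented above.
+ #
+ #   async def delete_example(share: "ShareClient") -> None:
+ #       await share.delete_share(delete_snapshots=True)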
+
+ @distributed_trace_async
+ async def acquire_lease(
+ self,
+ timeout: Optional[int] = None,
+ duration: Optional[int] = None,
+ proposed_lease_id: Optional[str] = None,
+ sharesnapshot: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """The Lease Share operation establishes and manages a lock on a share, or the specified snapshot
+ for set and delete share operations.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a
+ lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease
+ duration cannot be changed using renew or change. Default value is None.
+ :type duration: int
+ :param proposed_lease_id: Proposed lease ID, in a GUID string format. The File service returns
+ 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid
+ Constructor (String) for a list of valid GUID string formats. Default value is None.
+ :type proposed_lease_id: str
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire"))
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_acquire_lease_request(
+ url=self._config.url,
+ timeout=timeout,
+ duration=duration,
+ proposed_lease_id=proposed_lease_id,
+ sharesnapshot=sharesnapshot,
+ request_id_parameter=request_id_parameter,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ action=action,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace_async
+ async def release_lease(
+ self,
+ lease_id: str,
+ timeout: Optional[int] = None,
+ sharesnapshot: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """The Lease Share operation establishes and manages a lock on a share, or the specified snapshot
+ for set and delete share operations.
+
+ :param lease_id: Specifies the current lease ID on the resource. Required.
+ :type lease_id: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release"))
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_release_lease_request(
+ url=self._config.url,
+ lease_id=lease_id,
+ timeout=timeout,
+ sharesnapshot=sharesnapshot,
+ request_id_parameter=request_id_parameter,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ action=action,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace_async
+ async def change_lease(
+ self,
+ lease_id: str,
+ timeout: Optional[int] = None,
+ proposed_lease_id: Optional[str] = None,
+ sharesnapshot: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """The Lease Share operation establishes and manages a lock on a share, or the specified snapshot
+ for set and delete share operations.
+
+ :param lease_id: Specifies the current lease ID on the resource. Required.
+ :type lease_id: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param proposed_lease_id: Proposed lease ID, in a GUID string format. The File service returns
+ 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid
+ Constructor (String) for a list of valid GUID string formats. Default value is None.
+ :type proposed_lease_id: str
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change"))
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_change_lease_request(
+ url=self._config.url,
+ lease_id=lease_id,
+ timeout=timeout,
+ proposed_lease_id=proposed_lease_id,
+ sharesnapshot=sharesnapshot,
+ request_id_parameter=request_id_parameter,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ action=action,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace_async
+ async def renew_lease(
+ self,
+ lease_id: str,
+ timeout: Optional[int] = None,
+ sharesnapshot: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """The Lease Share operation establishes and manages a lock on a share, or the specified snapshot
+ for set and delete share operations.
+
+ :param lease_id: Specifies the current lease ID on the resource. Required.
+ :type lease_id: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew"))
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_renew_lease_request(
+ url=self._config.url,
+ lease_id=lease_id,
+ timeout=timeout,
+ sharesnapshot=sharesnapshot,
+ request_id_parameter=request_id_parameter,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ action=action,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace_async
+ async def break_lease(
+ self,
+ timeout: Optional[int] = None,
+ break_period: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ sharesnapshot: Optional[str] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """The Lease Share operation establishes and manages a lock on a share, or the specified snapshot
+ for set and delete share operations.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param break_period: For a break operation, proposed duration the lease should continue before
+ it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter
+ than the time remaining on the lease. If longer, the time remaining on the lease is used. A new
+ lease will not be available before the break period has expired, but the lease may be held for
+ longer than the break period. If this header does not appear with a break operation, a
+ fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease
+ breaks immediately. Default value is None.
+ :type break_period: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break"))
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_break_lease_request(
+ url=self._config.url,
+ timeout=timeout,
+ break_period=break_period,
+ lease_id=_lease_id,
+ request_id_parameter=request_id_parameter,
+ sharesnapshot=sharesnapshot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ action=action,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-lease-time"] = self._deserialize("int", response.headers.get("x-ms-lease-time"))
+ response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
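+
+ # --- Editorial note (hedged): one sketch covering the acquire/release/change/renew/break
+ # lease operations defined above. It assumes ShareClient.acquire_lease() from the public
+ # async SDK, which returns a ShareLeaseClient, and that share operations accept a lease
+ # keyword; change(), renew() and break_lease() follow the same awaitable pattern
+ # (assumed method names).
+ #
+ #   async def lease_example(share: "ShareClient") -> None:
+ #       lease = await share.acquire_lease(lease_duration=15)  # non-infinite: 15-60 seconds
+ #       try:
+ #           await share.set_share_metadata({"locked": "true"}, lease=lease)
+ #       finally:
+ #           await lease.release()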
+
+ @distributed_trace_async
+ async def create_snapshot(
+ self, timeout: Optional[int] = None, metadata: Optional[Dict[str, str]] = None, **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Creates a read-only snapshot of a share.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param metadata: A name-value pair to associate with a file storage object. Default value is
+ None.
+ :type metadata: dict[str, str]
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["snapshot"] = kwargs.pop("comp", _params.pop("comp", "snapshot"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_create_snapshot_request(
+ url=self._config.url,
+ timeout=timeout,
+ metadata=metadata,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-snapshot"] = self._deserialize("str", response.headers.get("x-ms-snapshot"))
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
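+
+ # --- Editorial note (hedged): sketch for create_snapshot(), via the assumed public wrapper.
+ # The returned mapping carries the x-ms-snapshot value surfaced in the headers above.
+ #
+ #   async def snapshot_example(share: "ShareClient") -> None:
+ #       snap = await share.create_snapshot(metadata={"purpose": "backup"})
+ #       print(snap["snapshot"])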
+
+ @overload
+ async def create_permission(
+ self,
+ share_permission: _models.SharePermission,
+ timeout: Optional[int] = None,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Create a permission (a security descriptor).
+
+ :param share_permission: A permission (a security descriptor) at the share level. Required.
+ :type share_permission: ~azure.storage.fileshare.models.SharePermission
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ async def create_permission(
+ self,
+ share_permission: IO[bytes],
+ timeout: Optional[int] = None,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Create a permission (a security descriptor).
+
+ :param share_permission: A permission (a security descriptor) at the share level. Required.
+ :type share_permission: IO[bytes]
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace_async
+ async def create_permission(
+ self, share_permission: Union[_models.SharePermission, IO[bytes]], timeout: Optional[int] = None, **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Create a permission (a security descriptor).
+
+ :param share_permission: A permission (a security descriptor) at the share level. Is either a
+ SharePermission type or an IO[bytes] type. Required.
+ :type share_permission: ~azure.storage.fileshare.models.SharePermission or IO[bytes]
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["filepermission"] = kwargs.pop("comp", _params.pop("comp", "filepermission"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _json = None
+ _content = None
+ if isinstance(share_permission, (IOBase, bytes)):
+ _content = share_permission
+ else:
+ _json = self._serialize.body(share_permission, "SharePermission")
+
+ _request = build_create_permission_request(
+ url=self._config.url,
+ timeout=timeout,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ content_type=content_type,
+ version=self._config.version,
+ json=_json,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-file-permission-key"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-permission-key")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace_async
+ async def get_permission(
+ self,
+ file_permission_key: str,
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ timeout: Optional[int] = None,
+ **kwargs: Any
+ ) -> _models.SharePermission:
+ # pylint: disable=line-too-long
+ """Returns the permission (security descriptor) for a given key.
+
+ :param file_permission_key: Key of the permission to retrieve for the directory/file. Required.
+ :type file_permission_key: str
+ :param file_permission_format: Optional. Available for version 2023-06-01 and later. Specifies
+ the format in which the permission is returned. Acceptable values are SDDL or binary. If
+ x-ms-file-permission-format is unspecified or explicitly set to SDDL, the permission is
+ returned in SDDL format. If x-ms-file-permission-format is explicitly set to binary, the
+ permission is returned as a base64 string representing the binary encoding of the permission.
+ Known values are: "Sddl" and "Binary". Default value is None.
+ :type file_permission_format: str or ~azure.storage.fileshare.models.FilePermissionFormat
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :return: SharePermission or the result of cls(response)
+ :rtype: ~azure.storage.fileshare.models.SharePermission
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["filepermission"] = kwargs.pop("comp", _params.pop("comp", "filepermission"))
+ cls: ClsType[_models.SharePermission] = kwargs.pop("cls", None)
+
+ _request = build_get_permission_request(
+ url=self._config.url,
+ file_permission_key=file_permission_key,
+ file_permission_format=file_permission_format,
+ timeout=timeout,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ deserialized = self._deserialize("SharePermission", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
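+
+ # --- Editorial note (hedged): sketch covering create_permission() and get_permission()
+ # above, assuming the public wrapper methods create_permission_for_share() and
+ # get_permission_for_share(); the SDDL string below is illustrative only.
+ #
+ #   async def permission_example(share: "ShareClient") -> None:
+ #       key = await share.create_permission_for_share("O:SYG:SYD:(A;;FA;;;SY)")
+ #       sddl = await share.get_permission_for_share(key)
+ #       print(sddl)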
+
+ @distributed_trace_async
+ async def set_properties(
+ self,
+ timeout: Optional[int] = None,
+ quota: Optional[int] = None,
+ access_tier: Optional[Union[str, _models.ShareAccessTier]] = None,
+ root_squash: Optional[Union[str, _models.ShareRootSquash]] = None,
+ enable_snapshot_virtual_directory_access: Optional[bool] = None,
+ paid_bursting_enabled: Optional[bool] = None,
+ paid_bursting_max_bandwidth_mibps: Optional[int] = None,
+ paid_bursting_max_iops: Optional[int] = None,
+ share_provisioned_iops: Optional[int] = None,
+ share_provisioned_bandwidth_mibps: Optional[int] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Sets properties for the specified share.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param quota: Specifies the maximum size of the share, in gigabytes. Default value is None.
+ :type quota: int
+ :param access_tier: Specifies the access tier of the share. Known values are:
+ "TransactionOptimized", "Hot", "Cool", and "Premium". Default value is None.
+ :type access_tier: str or ~azure.storage.fileshare.models.ShareAccessTier
+ :param root_squash: Root squash to set on the share. Only valid for NFS shares. Known values
+ are: "NoRootSquash", "RootSquash", and "AllSquash". Default value is None.
+ :type root_squash: str or ~azure.storage.fileshare.models.ShareRootSquash
+ :param enable_snapshot_virtual_directory_access: Default value is None.
+ :type enable_snapshot_virtual_directory_access: bool
+ :param paid_bursting_enabled: Optional. Boolean. Default if not specified is false. This
+ property enables paid bursting. Default value is None.
+ :type paid_bursting_enabled: bool
+ :param paid_bursting_max_bandwidth_mibps: Optional. Integer. Default if not specified is the
+ maximum throughput the file share can support. Current maximum for a file share is 10,340
+ MiB/sec. Default value is None.
+ :type paid_bursting_max_bandwidth_mibps: int
+ :param paid_bursting_max_iops: Optional. Integer. Default if not specified is the maximum IOPS
+ the file share can support. Current maximum for a file share is 102,400 IOPS. Default value is
+ None.
+ :type paid_bursting_max_iops: int
+ :param share_provisioned_iops: Optional. Supported in version 2025-01-05 and later. Only
+ allowed for provisioned v2 file shares. Specifies the provisioned number of input/output
+ operations per second (IOPS) of the share. If this is not specified, the provisioned IOPS is
+ set to a value calculated based on the recommendation formula. Default value is None.
+ :type share_provisioned_iops: int
+ :param share_provisioned_bandwidth_mibps: Optional. Supported in version 2025-01-05 and later.
+ Only allowed for provisioned v2 file shares. Specifies the provisioned bandwidth of the share,
+ in mebibytes per second (MiBps). If this is not specified, the provisioned bandwidth is set to
+ a value calculated based on the recommendation formula. Default value is None.
+ :type share_provisioned_bandwidth_mibps: int
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_set_properties_request(
+ url=self._config.url,
+ timeout=timeout,
+ quota=quota,
+ access_tier=access_tier,
+ lease_id=_lease_id,
+ root_squash=root_squash,
+ enable_snapshot_virtual_directory_access=enable_snapshot_virtual_directory_access,
+ paid_bursting_enabled=paid_bursting_enabled,
+ paid_bursting_max_bandwidth_mibps=paid_bursting_max_bandwidth_mibps,
+ paid_bursting_max_iops=paid_bursting_max_iops,
+ share_provisioned_iops=share_provisioned_iops,
+ share_provisioned_bandwidth_mibps=share_provisioned_bandwidth_mibps,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-share-quota"] = self._deserialize("int", response.headers.get("x-ms-share-quota"))
+ response_headers["x-ms-share-provisioned-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-iops")
+ )
+ response_headers["x-ms-share-provisioned-bandwidth-mibps"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-bandwidth-mibps")
+ )
+ response_headers["x-ms-share-included-burst-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-included-burst-iops")
+ )
+ response_headers["x-ms-share-max-burst-credits-for-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-max-burst-credits-for-iops")
+ )
+ response_headers["x-ms-share-next-allowed-quota-downgrade-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-share-next-allowed-quota-downgrade-time")
+ )
+ response_headers["x-ms-share-next-allowed-provisioned-iops-downgrade-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-share-next-allowed-provisioned-iops-downgrade-time")
+ )
+ response_headers["x-ms-share-next-allowed-provisioned-bandwidth-downgrade-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-share-next-allowed-provisioned-bandwidth-downgrade-time")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
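+
+ # --- Editorial note (hedged): sketch for set_properties(). The public async wrapper is
+ # assumed to expose the quota portion of this call as set_share_quota(); other properties
+ # such as access_tier are typically set through set_share_properties() (assumed name).
+ #
+ #   async def quota_example(share: "ShareClient") -> None:
+ #       await share.set_share_quota(200)  # new maximum share size in GiB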
+
+ @distributed_trace_async
+ async def set_metadata(
+ self,
+ timeout: Optional[int] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Sets one or more user-defined name-value pairs for the specified share.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param metadata: A name-value pair to associate with a file storage object. Default value is
+ None.
+ :type metadata: dict[str, str]
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_set_metadata_request(
+ url=self._config.url,
+ timeout=timeout,
+ metadata=metadata,
+ lease_id=_lease_id,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace_async
+ async def get_access_policy(
+ self,
+ timeout: Optional[int] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> List[_models.SignedIdentifier]:
+ # pylint: disable=line-too-long
+ """Returns information about stored access policies specified on the share.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: list of SignedIdentifier or the result of cls(response)
+ :rtype: list[~azure.storage.fileshare.models.SignedIdentifier]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl"))
+ cls: ClsType[List[_models.SignedIdentifier]] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_get_access_policy_request(
+ url=self._config.url,
+ timeout=timeout,
+ lease_id=_lease_id,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ deserialized = self._deserialize("[SignedIdentifier]", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @distributed_trace_async
+ async def set_access_policy(
+ self,
+ timeout: Optional[int] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ share_acl: Optional[List[_models.SignedIdentifier]] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Sets a stored access policy for use with shared access signatures.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :param share_acl: The ACL for the share. Default value is None.
+ :type share_acl: list[~azure.storage.fileshare.models.SignedIdentifier]
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl"))
+ content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+ serialization_ctxt = {"xml": {"name": "SignedIdentifiers", "wrapped": True}}
+ if share_acl is not None:
+ _content = self._serialize.body(
+ share_acl, "[SignedIdentifier]", is_xml=True, serialization_ctxt=serialization_ctxt
+ )
+ else:
+ _content = None
+
+ _request = build_set_access_policy_request(
+ url=self._config.url,
+ timeout=timeout,
+ lease_id=_lease_id,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ content_type=content_type,
+ version=self._config.version,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace_async
+ async def get_statistics(
+ self,
+ timeout: Optional[int] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> _models.ShareStats:
+ # pylint: disable=line-too-long
+ """Retrieves statistics related to the share.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: ShareStats or the result of cls(response)
+ :rtype: ~azure.storage.fileshare.models.ShareStats
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["stats"] = kwargs.pop("comp", _params.pop("comp", "stats"))
+ cls: ClsType[_models.ShareStats] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_get_statistics_request(
+ url=self._config.url,
+ timeout=timeout,
+ lease_id=_lease_id,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ deserialized = self._deserialize("ShareStats", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @distributed_trace_async
+ async def restore(
+ self,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ deleted_share_name: Optional[str] = None,
+ deleted_share_version: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Restores a previously deleted Share.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :param deleted_share_name: Specifies the name of the previously-deleted share. Default value is
+ None.
+ :type deleted_share_name: str
+ :param deleted_share_version: Specifies the version of the previously-deleted share. Default
+ value is None.
+ :type deleted_share_version: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_restore_request(
+ url=self._config.url,
+ timeout=timeout,
+ request_id_parameter=request_id_parameter,
+ deleted_share_name=deleted_share_name,
+ deleted_share_version=deleted_share_version,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-share-quota"] = self._deserialize("int", response.headers.get("x-ms-share-quota"))
+ response_headers["x-ms-share-provisioned-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-iops")
+ )
+ response_headers["x-ms-share-provisioned-bandwidth-mibps"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-bandwidth-mibps")
+ )
+ response_headers["x-ms-share-included-burst-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-included-burst-iops")
+ )
+ response_headers["x-ms-share-max-burst-credits-for-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-max-burst-credits-for-iops")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
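The async share operations above map one-to-one onto the share-level REST endpoints (metadata, ACL, stats, undelete). A minimal sketch of driving the ACL pair directly, assuming `share_ops` is an already-configured async ShareOperations instance and that SignedIdentifier accepts `id` and `access_policy` keywords as in the generated models (real applications normally reach these calls through the public ShareClient wrapper rather than this generated layer):

    # Sketch only: exercising the generated async ACL operations directly.
    from azure.storage.fileshare._generated import models as _models

    async def replace_share_acl(share_ops) -> None:
        policy = _models.AccessPolicy(
            start="2024-01-01T00:00:00Z",
            expiry="2024-12-31T00:00:00Z",
            permission="rwl",  # illustrative permission string
        )
        acl = [_models.SignedIdentifier(id="read-write-list", access_policy=policy)]

        # Serialized as the wrapped <SignedIdentifiers> XML body and sent with
        # restype=share&comp=acl.
        await share_ops.set_access_policy(share_acl=acl)

        # The matching read deserializes the stored policies back into a list.
        for identifier in await share_ops.get_access_policy():
            print(identifier.id)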
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/models/__init__.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/models/__init__.py
new file mode 100644
index 00000000..cb14cb30
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/models/__init__.py
@@ -0,0 +1,130 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+
+from ._models_py3 import ( # type: ignore
+ AccessPolicy,
+ ClearRange,
+ CopyFileSmbInfo,
+ CorsRule,
+ DestinationLeaseAccessConditions,
+ DirectoryItem,
+ FileHTTPHeaders,
+ FileItem,
+ FileProperty,
+ FileRange,
+ FilesAndDirectoriesListSegment,
+ HandleItem,
+ LeaseAccessConditions,
+ ListFilesAndDirectoriesSegmentResponse,
+ ListHandlesResponse,
+ ListSharesResponse,
+ Metrics,
+ RetentionPolicy,
+ ShareFileRangeList,
+ ShareItemInternal,
+ SharePermission,
+ SharePropertiesInternal,
+ ShareProtocolSettings,
+ ShareSmbSettings,
+ ShareStats,
+ SignedIdentifier,
+ SmbMultichannel,
+ SourceLeaseAccessConditions,
+ SourceModifiedAccessConditions,
+ StorageError,
+ StorageServiceProperties,
+ StringEncoded,
+)
+
+from ._azure_file_storage_enums import ( # type: ignore
+ AccessRight,
+ CopyStatusType,
+ DeleteSnapshotsOptionType,
+ FileLastWrittenMode,
+ FilePermissionFormat,
+ FileRangeWriteType,
+ LeaseDurationType,
+ LeaseStateType,
+ LeaseStatusType,
+ ListFilesIncludeType,
+ ListSharesIncludeType,
+ ModeCopyMode,
+ NfsFileType,
+ OwnerCopyMode,
+ PermissionCopyModeType,
+ ShareAccessTier,
+ ShareRootSquash,
+ ShareTokenIntent,
+ StorageErrorCode,
+)
+from ._patch import __all__ as _patch_all
+from ._patch import *
+from ._patch import patch_sdk as _patch_sdk
+
+__all__ = [
+ "AccessPolicy",
+ "ClearRange",
+ "CopyFileSmbInfo",
+ "CorsRule",
+ "DestinationLeaseAccessConditions",
+ "DirectoryItem",
+ "FileHTTPHeaders",
+ "FileItem",
+ "FileProperty",
+ "FileRange",
+ "FilesAndDirectoriesListSegment",
+ "HandleItem",
+ "LeaseAccessConditions",
+ "ListFilesAndDirectoriesSegmentResponse",
+ "ListHandlesResponse",
+ "ListSharesResponse",
+ "Metrics",
+ "RetentionPolicy",
+ "ShareFileRangeList",
+ "ShareItemInternal",
+ "SharePermission",
+ "SharePropertiesInternal",
+ "ShareProtocolSettings",
+ "ShareSmbSettings",
+ "ShareStats",
+ "SignedIdentifier",
+ "SmbMultichannel",
+ "SourceLeaseAccessConditions",
+ "SourceModifiedAccessConditions",
+ "StorageError",
+ "StorageServiceProperties",
+ "StringEncoded",
+ "AccessRight",
+ "CopyStatusType",
+ "DeleteSnapshotsOptionType",
+ "FileLastWrittenMode",
+ "FilePermissionFormat",
+ "FileRangeWriteType",
+ "LeaseDurationType",
+ "LeaseStateType",
+ "LeaseStatusType",
+ "ListFilesIncludeType",
+ "ListSharesIncludeType",
+ "ModeCopyMode",
+ "NfsFileType",
+ "OwnerCopyMode",
+ "PermissionCopyModeType",
+ "ShareAccessTier",
+ "ShareRootSquash",
+ "ShareTokenIntent",
+ "StorageErrorCode",
+]
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
+_patch_sdk()
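The docstrings in the operation modules refer to these classes through the public ~azure.storage.fileshare.models alias; inside this generated package they are importable directly. A small sketch, using only constructors shown in this diff, of the lease parameter group that gates the share operations:

    # Sketch only: the LeaseAccessConditions parameter group accepted by
    # set_metadata, get_access_policy, set_access_policy and get_statistics.
    from azure.storage.fileshare._generated import models as _models

    lease = _models.LeaseAccessConditions(
        lease_id="0f8fad5b-d9cb-469f-a165-70867728950e"  # illustrative lease id
    )

    # Each operation unwraps lease.lease_id and forwards it to its request builder,
    # e.g. build_set_metadata_request(..., lease_id=lease.lease_id, ...), so an
    # invocation might look like:
    # await share_ops.set_metadata(metadata={"project": "docs"}, lease_access_conditions=lease)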
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/models/_azure_file_storage_enums.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/models/_azure_file_storage_enums.py
new file mode 100644
index 00000000..efc7a7fe
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/models/_azure_file_storage_enums.py
@@ -0,0 +1,222 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from enum import Enum
+from azure.core import CaseInsensitiveEnumMeta
+
+
+class AccessRight(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Access rights of the access policy."""
+
+ READ = "Read"
+ WRITE = "Write"
+ DELETE = "Delete"
+
+
+class CopyStatusType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """CopyStatusType."""
+
+ PENDING = "pending"
+ SUCCESS = "success"
+ ABORTED = "aborted"
+ FAILED = "failed"
+
+
+class DeleteSnapshotsOptionType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """DeleteSnapshotsOptionType."""
+
+ INCLUDE = "include"
+ INCLUDE_LEASED = "include-leased"
+
+
+class FileLastWrittenMode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """FileLastWrittenMode."""
+
+ NOW = "Now"
+ PRESERVE = "Preserve"
+
+
+class FilePermissionFormat(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """FilePermissionFormat."""
+
+ SDDL = "Sddl"
+ BINARY = "Binary"
+
+
+class FileRangeWriteType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """FileRangeWriteType."""
+
+ UPDATE = "update"
+ CLEAR = "clear"
+
+
+class LeaseDurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """When a share is leased, specifies whether the lease is of infinite or fixed duration."""
+
+ INFINITE = "infinite"
+ FIXED = "fixed"
+
+
+class LeaseStateType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Lease state of the share."""
+
+ AVAILABLE = "available"
+ LEASED = "leased"
+ EXPIRED = "expired"
+ BREAKING = "breaking"
+ BROKEN = "broken"
+
+
+class LeaseStatusType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """The current lease status of the share."""
+
+ LOCKED = "locked"
+ UNLOCKED = "unlocked"
+
+
+class ListFilesIncludeType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """ListFilesIncludeType."""
+
+ TIMESTAMPS = "Timestamps"
+ ETAG = "Etag"
+ ATTRIBUTES = "Attributes"
+ PERMISSION_KEY = "PermissionKey"
+
+
+class ListSharesIncludeType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """ListSharesIncludeType."""
+
+ SNAPSHOTS = "snapshots"
+ METADATA = "metadata"
+ DELETED = "deleted"
+
+
+class ModeCopyMode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """ModeCopyMode."""
+
+ SOURCE = "source"
+ OVERRIDE = "override"
+
+
+class NfsFileType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """NfsFileType."""
+
+ REGULAR = "Regular"
+ DIRECTORY = "Directory"
+ SYM_LINK = "SymLink"
+
+
+class OwnerCopyMode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """OwnerCopyMode."""
+
+ SOURCE = "source"
+ OVERRIDE = "override"
+
+
+class PermissionCopyModeType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """PermissionCopyModeType."""
+
+ SOURCE = "source"
+ OVERRIDE = "override"
+
+
+class ShareAccessTier(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """ShareAccessTier."""
+
+ TRANSACTION_OPTIMIZED = "TransactionOptimized"
+ HOT = "Hot"
+ COOL = "Cool"
+ PREMIUM = "Premium"
+
+
+class ShareRootSquash(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """ShareRootSquash."""
+
+ NO_ROOT_SQUASH = "NoRootSquash"
+ ROOT_SQUASH = "RootSquash"
+ ALL_SQUASH = "AllSquash"
+
+
+class ShareTokenIntent(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """ShareTokenIntent."""
+
+ BACKUP = "backup"
+
+
+class StorageErrorCode(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+ """Error codes returned by the service."""
+
+ ACCOUNT_ALREADY_EXISTS = "AccountAlreadyExists"
+ ACCOUNT_BEING_CREATED = "AccountBeingCreated"
+ ACCOUNT_IS_DISABLED = "AccountIsDisabled"
+ AUTHENTICATION_FAILED = "AuthenticationFailed"
+ AUTHORIZATION_FAILURE = "AuthorizationFailure"
+ CONDITION_HEADERS_NOT_SUPPORTED = "ConditionHeadersNotSupported"
+ CONDITION_NOT_MET = "ConditionNotMet"
+ EMPTY_METADATA_KEY = "EmptyMetadataKey"
+ FILE_SHARE_PROVISIONED_BANDWIDTH_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedBandwidthDowngradeNotAllowed"
+ FILE_SHARE_PROVISIONED_IOPS_DOWNGRADE_NOT_ALLOWED = "FileShareProvisionedIopsDowngradeNotAllowed"
+ INSUFFICIENT_ACCOUNT_PERMISSIONS = "InsufficientAccountPermissions"
+ INTERNAL_ERROR = "InternalError"
+ INVALID_AUTHENTICATION_INFO = "InvalidAuthenticationInfo"
+ INVALID_HEADER_VALUE = "InvalidHeaderValue"
+ INVALID_HTTP_VERB = "InvalidHttpVerb"
+ INVALID_INPUT = "InvalidInput"
+ INVALID_MD5 = "InvalidMd5"
+ INVALID_METADATA = "InvalidMetadata"
+ INVALID_QUERY_PARAMETER_VALUE = "InvalidQueryParameterValue"
+ INVALID_RANGE = "InvalidRange"
+ INVALID_RESOURCE_NAME = "InvalidResourceName"
+ INVALID_URI = "InvalidUri"
+ INVALID_XML_DOCUMENT = "InvalidXmlDocument"
+ INVALID_XML_NODE_VALUE = "InvalidXmlNodeValue"
+ MD5_MISMATCH = "Md5Mismatch"
+ METADATA_TOO_LARGE = "MetadataTooLarge"
+ MISSING_CONTENT_LENGTH_HEADER = "MissingContentLengthHeader"
+ MISSING_REQUIRED_QUERY_PARAMETER = "MissingRequiredQueryParameter"
+ MISSING_REQUIRED_HEADER = "MissingRequiredHeader"
+ MISSING_REQUIRED_XML_NODE = "MissingRequiredXmlNode"
+ MULTIPLE_CONDITION_HEADERS_NOT_SUPPORTED = "MultipleConditionHeadersNotSupported"
+ OPERATION_TIMED_OUT = "OperationTimedOut"
+ OUT_OF_RANGE_INPUT = "OutOfRangeInput"
+ OUT_OF_RANGE_QUERY_PARAMETER_VALUE = "OutOfRangeQueryParameterValue"
+ REQUEST_BODY_TOO_LARGE = "RequestBodyTooLarge"
+ RESOURCE_TYPE_MISMATCH = "ResourceTypeMismatch"
+ REQUEST_URL_FAILED_TO_PARSE = "RequestUrlFailedToParse"
+ RESOURCE_ALREADY_EXISTS = "ResourceAlreadyExists"
+ RESOURCE_NOT_FOUND = "ResourceNotFound"
+ SERVER_BUSY = "ServerBusy"
+ UNSUPPORTED_HEADER = "UnsupportedHeader"
+ UNSUPPORTED_XML_NODE = "UnsupportedXmlNode"
+ UNSUPPORTED_QUERY_PARAMETER = "UnsupportedQueryParameter"
+ UNSUPPORTED_HTTP_VERB = "UnsupportedHttpVerb"
+ CANNOT_DELETE_FILE_OR_DIRECTORY = "CannotDeleteFileOrDirectory"
+ CLIENT_CACHE_FLUSH_DELAY = "ClientCacheFlushDelay"
+ DELETE_PENDING = "DeletePending"
+ DIRECTORY_NOT_EMPTY = "DirectoryNotEmpty"
+ FILE_LOCK_CONFLICT = "FileLockConflict"
+ INVALID_FILE_OR_DIRECTORY_PATH_NAME = "InvalidFileOrDirectoryPathName"
+ PARENT_NOT_FOUND = "ParentNotFound"
+ READ_ONLY_ATTRIBUTE = "ReadOnlyAttribute"
+ SHARE_ALREADY_EXISTS = "ShareAlreadyExists"
+ SHARE_BEING_DELETED = "ShareBeingDeleted"
+ SHARE_DISABLED = "ShareDisabled"
+ SHARE_NOT_FOUND = "ShareNotFound"
+ SHARING_VIOLATION = "SharingViolation"
+ SHARE_SNAPSHOT_IN_PROGRESS = "ShareSnapshotInProgress"
+ SHARE_SNAPSHOT_COUNT_EXCEEDED = "ShareSnapshotCountExceeded"
+ SHARE_SNAPSHOT_OPERATION_NOT_SUPPORTED = "ShareSnapshotOperationNotSupported"
+ SHARE_HAS_SNAPSHOTS = "ShareHasSnapshots"
+ PREVIOUS_SNAPSHOT_NOT_FOUND = "PreviousSnapshotNotFound"
+ CONTAINER_QUOTA_DOWNGRADE_NOT_ALLOWED = "ContainerQuotaDowngradeNotAllowed"
+ AUTHORIZATION_SOURCE_IP_MISMATCH = "AuthorizationSourceIPMismatch"
+ AUTHORIZATION_PROTOCOL_MISMATCH = "AuthorizationProtocolMismatch"
+ AUTHORIZATION_PERMISSION_MISMATCH = "AuthorizationPermissionMismatch"
+ AUTHORIZATION_SERVICE_MISMATCH = "AuthorizationServiceMismatch"
+ AUTHORIZATION_RESOURCE_TYPE_MISMATCH = "AuthorizationResourceTypeMismatch"
+ FEATURE_VERSION_MISMATCH = "FeatureVersionMismatch"
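Because every enum here uses azure.core's CaseInsensitiveEnumMeta and subclasses str, member lookup by name is case-insensitive and members compare equal to their plain string values; a short sketch:

    # Sketch: case-insensitive name lookup plus plain-string comparison.
    from azure.storage.fileshare._generated.models import ShareAccessTier, StorageErrorCode

    assert ShareAccessTier["hot"] is ShareAccessTier.HOT
    assert StorageErrorCode["share_not_found"] is StorageErrorCode.SHARE_NOT_FOUND
    assert ShareAccessTier.COOL == "Cool"  # str subclass, so direct comparison works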
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/models/_models_py3.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/models/_models_py3.py
new file mode 100644
index 00000000..61fe75ce
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/models/_models_py3.py
@@ -0,0 +1,1711 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+import datetime
+from typing import Any, Dict, List, Optional, TYPE_CHECKING, Union
+
+from .. import _serialization
+
+if TYPE_CHECKING:
+ from .. import models as _models
+
+
+class AccessPolicy(_serialization.Model):
+ """An Access policy.
+
+ :ivar start: The date-time the policy is active.
+ :vartype start: str
+ :ivar expiry: The date-time the policy expires.
+ :vartype expiry: str
+ :ivar permission: The permissions for the ACL policy.
+ :vartype permission: str
+ """
+
+ _attribute_map = {
+ "start": {"key": "Start", "type": "str"},
+ "expiry": {"key": "Expiry", "type": "str"},
+ "permission": {"key": "Permission", "type": "str"},
+ }
+
+ def __init__(
+ self,
+ *,
+ start: Optional[str] = None,
+ expiry: Optional[str] = None,
+ permission: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword start: The date-time the policy is active.
+ :paramtype start: str
+ :keyword expiry: The date-time the policy expires.
+ :paramtype expiry: str
+ :keyword permission: The permissions for the ACL policy.
+ :paramtype permission: str
+ """
+ super().__init__(**kwargs)
+ self.start = start
+ self.expiry = expiry
+ self.permission = permission
+
+
+class ClearRange(_serialization.Model):
+ """ClearRange.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar start: Required.
+ :vartype start: int
+ :ivar end: Required.
+ :vartype end: int
+ """
+
+ _validation = {
+ "start": {"required": True},
+ "end": {"required": True},
+ }
+
+ _attribute_map = {
+ "start": {"key": "Start", "type": "int", "xml": {"name": "Start"}},
+ "end": {"key": "End", "type": "int", "xml": {"name": "End"}},
+ }
+ _xml_map = {"name": "ClearRange"}
+
+ def __init__(self, *, start: int, end: int, **kwargs: Any) -> None:
+ """
+ :keyword start: Required.
+ :paramtype start: int
+ :keyword end: Required.
+ :paramtype end: int
+ """
+ super().__init__(**kwargs)
+ self.start = start
+ self.end = end
+
+
+class CopyFileSmbInfo(_serialization.Model):
+ """Parameter group.
+
+ :ivar file_attributes: Specifies either the option to copy file attributes from a source
+ file(source) to a target file or a list of attributes to set on a target file.
+ :vartype file_attributes: str
+ :ivar file_creation_time: Specifies either the option to copy file creation time from a source
+ file(source) to a target file or a time value in ISO 8601 format to set as creation time on a
+ target file.
+ :vartype file_creation_time: str
+ :ivar file_last_write_time: Specifies either the option to copy file last write time from a
+ source file(source) to a target file or a time value in ISO 8601 format to set as last write
+ time on a target file.
+ :vartype file_last_write_time: str
+ :ivar file_change_time: Specifies either the option to copy file change time from a source
+ file(source) to a target file or a time value in ISO 8601 format to set as change time on a
+ target file.
+ :vartype file_change_time: str
+ :ivar file_permission_copy_mode: Specifies the option to copy file security descriptor from
+ source file or to set it using the value which is defined by the header value of
+ x-ms-file-permission or x-ms-file-permission-key. Known values are: "source" and "override".
+ :vartype file_permission_copy_mode: str or
+ ~azure.storage.fileshare.models.PermissionCopyModeType
+ :ivar ignore_read_only: Specifies the option to overwrite the target file if it already exists
+ and has read-only attribute set.
+ :vartype ignore_read_only: bool
+ :ivar set_archive_attribute: Specifies the option to set archive attribute on a target file.
+ True means archive attribute will be set on a target file despite attribute overrides or a
+ source file state.
+ :vartype set_archive_attribute: bool
+ """
+
+ _attribute_map = {
+ "file_attributes": {"key": "fileAttributes", "type": "str"},
+ "file_creation_time": {"key": "fileCreationTime", "type": "str"},
+ "file_last_write_time": {"key": "fileLastWriteTime", "type": "str"},
+ "file_change_time": {"key": "fileChangeTime", "type": "str"},
+ "file_permission_copy_mode": {"key": "filePermissionCopyMode", "type": "str"},
+ "ignore_read_only": {"key": "ignoreReadOnly", "type": "bool"},
+ "set_archive_attribute": {"key": "setArchiveAttribute", "type": "bool"},
+ }
+
+ def __init__(
+ self,
+ *,
+ file_attributes: Optional[str] = None,
+ file_creation_time: Optional[str] = None,
+ file_last_write_time: Optional[str] = None,
+ file_change_time: Optional[str] = None,
+ file_permission_copy_mode: Optional[Union[str, "_models.PermissionCopyModeType"]] = None,
+ ignore_read_only: Optional[bool] = None,
+ set_archive_attribute: Optional[bool] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword file_attributes: Specifies either the option to copy file attributes from a source
+ file(source) to a target file or a list of attributes to set on a target file.
+ :paramtype file_attributes: str
+ :keyword file_creation_time: Specifies either the option to copy file creation time from a
+ source file(source) to a target file or a time value in ISO 8601 format to set as creation time
+ on a target file.
+ :paramtype file_creation_time: str
+ :keyword file_last_write_time: Specifies either the option to copy file last write time from a
+ source file(source) to a target file or a time value in ISO 8601 format to set as last write
+ time on a target file.
+ :paramtype file_last_write_time: str
+ :keyword file_change_time: Specifies either the option to copy file change time from a
+ source file(source) to a target file or a time value in ISO 8601 format to set as change
+ time on a target file.
+ :paramtype file_change_time: str
+ :keyword file_permission_copy_mode: Specifies the option to copy file security descriptor from
+ source file or to set it using the value which is defined by the header value of
+ x-ms-file-permission or x-ms-file-permission-key. Known values are: "source" and "override".
+ :paramtype file_permission_copy_mode: str or
+ ~azure.storage.fileshare.models.PermissionCopyModeType
+ :keyword ignore_read_only: Specifies the option to overwrite the target file if it already
+ exists and has read-only attribute set.
+ :paramtype ignore_read_only: bool
+ :keyword set_archive_attribute: Specifies the option to set archive attribute on a target file.
+ True means archive attribute will be set on a target file despite attribute overrides or a
+ source file state.
+ :paramtype set_archive_attribute: bool
+ """
+ super().__init__(**kwargs)
+ self.file_attributes = file_attributes
+ self.file_creation_time = file_creation_time
+ self.file_last_write_time = file_last_write_time
+ self.file_change_time = file_change_time
+ self.file_permission_copy_mode = file_permission_copy_mode
+ self.ignore_read_only = ignore_read_only
+ self.set_archive_attribute = set_archive_attribute
+
+
+class CorsRule(_serialization.Model):
+ """CORS is an HTTP feature that enables a web application running under one domain to access
+ resources in another domain. Web browsers implement a security restriction known as same-origin
+ policy that prevents a web page from calling APIs in a different domain; CORS provides a secure
+ way to allow one domain (the origin domain) to call APIs in another domain.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar allowed_origins: The origin domains that are permitted to make a request against the
+ storage service via CORS. The origin domain is the domain from which the request originates.
+ Note that the origin must be an exact case-sensitive match with the origin that the user agent
+ sends to the service. You can also use the wildcard character '*' to allow all origin domains
+ to make requests via CORS. Required.
+ :vartype allowed_origins: str
+ :ivar allowed_methods: The methods (HTTP request verbs) that the origin domain may use for a
+ CORS request. (comma separated). Required.
+ :vartype allowed_methods: str
+ :ivar allowed_headers: The request headers that the origin domain may specify on the CORS
+ request. Required.
+ :vartype allowed_headers: str
+ :ivar exposed_headers: The response headers that may be sent in the response to the CORS
+ request and exposed by the browser to the request issuer. Required.
+ :vartype exposed_headers: str
+ :ivar max_age_in_seconds: The maximum amount of time that a browser should cache the preflight
+ OPTIONS request. Required.
+ :vartype max_age_in_seconds: int
+ """
+
+ _validation = {
+ "allowed_origins": {"required": True},
+ "allowed_methods": {"required": True},
+ "allowed_headers": {"required": True},
+ "exposed_headers": {"required": True},
+ "max_age_in_seconds": {"required": True, "minimum": 0},
+ }
+
+ _attribute_map = {
+ "allowed_origins": {"key": "AllowedOrigins", "type": "str"},
+ "allowed_methods": {"key": "AllowedMethods", "type": "str"},
+ "allowed_headers": {"key": "AllowedHeaders", "type": "str"},
+ "exposed_headers": {"key": "ExposedHeaders", "type": "str"},
+ "max_age_in_seconds": {"key": "MaxAgeInSeconds", "type": "int"},
+ }
+
+ def __init__(
+ self,
+ *,
+ allowed_origins: str,
+ allowed_methods: str,
+ allowed_headers: str,
+ exposed_headers: str,
+ max_age_in_seconds: int,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword allowed_origins: The origin domains that are permitted to make a request against the
+ storage service via CORS. The origin domain is the domain from which the request originates.
+ Note that the origin must be an exact case-sensitive match with the origin that the user agent
+ sends to the service. You can also use the wildcard character '*' to allow all origin domains
+ to make requests via CORS. Required.
+ :paramtype allowed_origins: str
+ :keyword allowed_methods: The methods (HTTP request verbs) that the origin domain may use for a
+ CORS request. (comma separated). Required.
+ :paramtype allowed_methods: str
+ :keyword allowed_headers: The request headers that the origin domain may specify on the CORS
+ request. Required.
+ :paramtype allowed_headers: str
+ :keyword exposed_headers: The response headers that may be sent in the response to the CORS
+ request and exposed by the browser to the request issuer. Required.
+ :paramtype exposed_headers: str
+ :keyword max_age_in_seconds: The maximum amount of time that a browser should cache the preflight
+ OPTIONS request. Required.
+ :paramtype max_age_in_seconds: int
+ """
+ super().__init__(**kwargs)
+ self.allowed_origins = allowed_origins
+ self.allowed_methods = allowed_methods
+ self.allowed_headers = allowed_headers
+ self.exposed_headers = exposed_headers
+ self.max_age_in_seconds = max_age_in_seconds
+
+
+class DestinationLeaseAccessConditions(_serialization.Model):
+ """Parameter group.
+
+ :ivar destination_lease_id: Required if the destination file has an active infinite lease. The
+ lease ID specified for this header must match the lease ID of the destination file. If the
+ request does not include the lease ID or it is not valid, the operation fails with status code
+ 412 (Precondition Failed). If this header is specified and the destination file does not
+ currently have an active lease, the operation will also fail with status code 412 (Precondition
+ Failed).
+ :vartype destination_lease_id: str
+ """
+
+ _attribute_map = {
+ "destination_lease_id": {"key": "destinationLeaseId", "type": "str"},
+ }
+
+ def __init__(self, *, destination_lease_id: Optional[str] = None, **kwargs: Any) -> None:
+ """
+ :keyword destination_lease_id: Required if the destination file has an active infinite lease.
+ The lease ID specified for this header must match the lease ID of the destination file. If the
+ request does not include the lease ID or it is not valid, the operation fails with status code
+ 412 (Precondition Failed). If this header is specified and the destination file does not
+ currently have an active lease, the operation will also fail with status code 412 (Precondition
+ Failed).
+ :paramtype destination_lease_id: str
+ """
+ super().__init__(**kwargs)
+ self.destination_lease_id = destination_lease_id
+
+
+class DirectoryItem(_serialization.Model):
+ """A listed directory item.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar name: Required.
+ :vartype name: ~azure.storage.fileshare.models.StringEncoded
+ :ivar file_id:
+ :vartype file_id: str
+ :ivar properties: File properties.
+ :vartype properties: ~azure.storage.fileshare.models.FileProperty
+ :ivar attributes:
+ :vartype attributes: str
+ :ivar permission_key:
+ :vartype permission_key: str
+ """
+
+ _validation = {
+ "name": {"required": True},
+ }
+
+ _attribute_map = {
+ "name": {"key": "Name", "type": "StringEncoded"},
+ "file_id": {"key": "FileId", "type": "str"},
+ "properties": {"key": "Properties", "type": "FileProperty"},
+ "attributes": {"key": "Attributes", "type": "str"},
+ "permission_key": {"key": "PermissionKey", "type": "str"},
+ }
+ _xml_map = {"name": "Directory"}
+
+ def __init__(
+ self,
+ *,
+ name: "_models.StringEncoded",
+ file_id: Optional[str] = None,
+ properties: Optional["_models.FileProperty"] = None,
+ attributes: Optional[str] = None,
+ permission_key: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword name: Required.
+ :paramtype name: ~azure.storage.fileshare.models.StringEncoded
+ :keyword file_id:
+ :paramtype file_id: str
+ :keyword properties: File properties.
+ :paramtype properties: ~azure.storage.fileshare.models.FileProperty
+ :keyword attributes:
+ :paramtype attributes: str
+ :keyword permission_key:
+ :paramtype permission_key: str
+ """
+ super().__init__(**kwargs)
+ self.name = name
+ self.file_id = file_id
+ self.properties = properties
+ self.attributes = attributes
+ self.permission_key = permission_key
+
+
+class FileHTTPHeaders(_serialization.Model):
+ """Parameter group.
+
+ :ivar file_content_type: Sets the MIME content type of the file. The default type is
+ 'application/octet-stream'.
+ :vartype file_content_type: str
+ :ivar file_content_encoding: Specifies which content encodings have been applied to the file.
+ :vartype file_content_encoding: str
+ :ivar file_content_language: Specifies the natural languages used by this resource.
+ :vartype file_content_language: str
+ :ivar file_cache_control: Sets the file's cache control. The File service stores this value but
+ does not use or modify it.
+ :vartype file_cache_control: str
+ :ivar file_content_md5: Sets the file's MD5 hash.
+ :vartype file_content_md5: bytes
+ :ivar file_content_disposition: Sets the file's Content-Disposition header.
+ :vartype file_content_disposition: str
+ """
+
+ _attribute_map = {
+ "file_content_type": {"key": "fileContentType", "type": "str"},
+ "file_content_encoding": {"key": "fileContentEncoding", "type": "str"},
+ "file_content_language": {"key": "fileContentLanguage", "type": "str"},
+ "file_cache_control": {"key": "fileCacheControl", "type": "str"},
+ "file_content_md5": {"key": "fileContentMD5", "type": "bytearray"},
+ "file_content_disposition": {"key": "fileContentDisposition", "type": "str"},
+ }
+
+ def __init__(
+ self,
+ *,
+ file_content_type: Optional[str] = None,
+ file_content_encoding: Optional[str] = None,
+ file_content_language: Optional[str] = None,
+ file_cache_control: Optional[str] = None,
+ file_content_md5: Optional[bytes] = None,
+ file_content_disposition: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword file_content_type: Sets the MIME content type of the file. The default type is
+ 'application/octet-stream'.
+ :paramtype file_content_type: str
+ :keyword file_content_encoding: Specifies which content encodings have been applied to the
+ file.
+ :paramtype file_content_encoding: str
+ :keyword file_content_language: Specifies the natural languages used by this resource.
+ :paramtype file_content_language: str
+ :keyword file_cache_control: Sets the file's cache control. The File service stores this value
+ but does not use or modify it.
+ :paramtype file_cache_control: str
+ :keyword file_content_md5: Sets the file's MD5 hash.
+ :paramtype file_content_md5: bytes
+ :keyword file_content_disposition: Sets the file's Content-Disposition header.
+ :paramtype file_content_disposition: str
+ """
+ super().__init__(**kwargs)
+ self.file_content_type = file_content_type
+ self.file_content_encoding = file_content_encoding
+ self.file_content_language = file_content_language
+ self.file_cache_control = file_cache_control
+ self.file_content_md5 = file_content_md5
+ self.file_content_disposition = file_content_disposition
+
+
+class FileItem(_serialization.Model):
+ """A listed file item.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar name: Required.
+ :vartype name: ~azure.storage.fileshare.models.StringEncoded
+ :ivar file_id:
+ :vartype file_id: str
+ :ivar properties: File properties. Required.
+ :vartype properties: ~azure.storage.fileshare.models.FileProperty
+ :ivar attributes:
+ :vartype attributes: str
+ :ivar permission_key:
+ :vartype permission_key: str
+ """
+
+ _validation = {
+ "name": {"required": True},
+ "properties": {"required": True},
+ }
+
+ _attribute_map = {
+ "name": {"key": "Name", "type": "StringEncoded"},
+ "file_id": {"key": "FileId", "type": "str"},
+ "properties": {"key": "Properties", "type": "FileProperty"},
+ "attributes": {"key": "Attributes", "type": "str"},
+ "permission_key": {"key": "PermissionKey", "type": "str"},
+ }
+ _xml_map = {"name": "File"}
+
+ def __init__(
+ self,
+ *,
+ name: "_models.StringEncoded",
+ properties: "_models.FileProperty",
+ file_id: Optional[str] = None,
+ attributes: Optional[str] = None,
+ permission_key: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword name: Required.
+ :paramtype name: ~azure.storage.fileshare.models.StringEncoded
+ :keyword file_id:
+ :paramtype file_id: str
+ :keyword properties: File properties. Required.
+ :paramtype properties: ~azure.storage.fileshare.models.FileProperty
+ :keyword attributes:
+ :paramtype attributes: str
+ :keyword permission_key:
+ :paramtype permission_key: str
+ """
+ super().__init__(**kwargs)
+ self.name = name
+ self.file_id = file_id
+ self.properties = properties
+ self.attributes = attributes
+ self.permission_key = permission_key
+
+
+class FileProperty(_serialization.Model):
+ """File properties.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar content_length: Content length of the file. This value may not be up-to-date since an SMB
+ client may have modified the file locally. The value of Content-Length may not reflect that
+ fact until the handle is closed or the op-lock is broken. To retrieve current property values,
+ call Get File Properties. Required.
+ :vartype content_length: int
+ :ivar creation_time:
+ :vartype creation_time: ~datetime.datetime
+ :ivar last_access_time:
+ :vartype last_access_time: ~datetime.datetime
+ :ivar last_write_time:
+ :vartype last_write_time: ~datetime.datetime
+ :ivar change_time:
+ :vartype change_time: ~datetime.datetime
+ :ivar last_modified:
+ :vartype last_modified: ~datetime.datetime
+ :ivar etag:
+ :vartype etag: str
+ """
+
+ _validation = {
+ "content_length": {"required": True},
+ }
+
+ _attribute_map = {
+ "content_length": {"key": "Content-Length", "type": "int"},
+ "creation_time": {"key": "CreationTime", "type": "iso-8601"},
+ "last_access_time": {"key": "LastAccessTime", "type": "iso-8601"},
+ "last_write_time": {"key": "LastWriteTime", "type": "iso-8601"},
+ "change_time": {"key": "ChangeTime", "type": "iso-8601"},
+ "last_modified": {"key": "Last-Modified", "type": "rfc-1123"},
+ "etag": {"key": "Etag", "type": "str"},
+ }
+
+ def __init__(
+ self,
+ *,
+ content_length: int,
+ creation_time: Optional[datetime.datetime] = None,
+ last_access_time: Optional[datetime.datetime] = None,
+ last_write_time: Optional[datetime.datetime] = None,
+ change_time: Optional[datetime.datetime] = None,
+ last_modified: Optional[datetime.datetime] = None,
+ etag: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword content_length: Content length of the file. This value may not be up-to-date since an
+ SMB client may have modified the file locally. The value of Content-Length may not reflect that
+ fact until the handle is closed or the op-lock is broken. To retrieve current property values,
+ call Get File Properties. Required.
+ :paramtype content_length: int
+ :keyword creation_time:
+ :paramtype creation_time: ~datetime.datetime
+ :keyword last_access_time:
+ :paramtype last_access_time: ~datetime.datetime
+ :keyword last_write_time:
+ :paramtype last_write_time: ~datetime.datetime
+ :keyword change_time:
+ :paramtype change_time: ~datetime.datetime
+ :keyword last_modified:
+ :paramtype last_modified: ~datetime.datetime
+ :keyword etag:
+ :paramtype etag: str
+ """
+ super().__init__(**kwargs)
+ self.content_length = content_length
+ self.creation_time = creation_time
+ self.last_access_time = last_access_time
+ self.last_write_time = last_write_time
+ self.change_time = change_time
+ self.last_modified = last_modified
+ self.etag = etag
+
+
+class FileRange(_serialization.Model):
+ """An Azure Storage file range.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar start: Start of the range. Required.
+ :vartype start: int
+ :ivar end: End of the range. Required.
+ :vartype end: int
+ """
+
+ _validation = {
+ "start": {"required": True},
+ "end": {"required": True},
+ }
+
+ _attribute_map = {
+ "start": {"key": "Start", "type": "int"},
+ "end": {"key": "End", "type": "int"},
+ }
+ _xml_map = {"name": "Range"}
+
+ def __init__(self, *, start: int, end: int, **kwargs: Any) -> None:
+ """
+ :keyword start: Start of the range. Required.
+ :paramtype start: int
+ :keyword end: End of the range. Required.
+ :paramtype end: int
+ """
+ super().__init__(**kwargs)
+ self.start = start
+ self.end = end
+
+
+class FilesAndDirectoriesListSegment(_serialization.Model):
+ """Abstract for entries that can be listed from Directory.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar directory_items: Required.
+ :vartype directory_items: list[~azure.storage.fileshare.models.DirectoryItem]
+ :ivar file_items: Required.
+ :vartype file_items: list[~azure.storage.fileshare.models.FileItem]
+ """
+
+ _validation = {
+ "directory_items": {"required": True},
+ "file_items": {"required": True},
+ }
+
+ _attribute_map = {
+ "directory_items": {"key": "DirectoryItems", "type": "[DirectoryItem]", "xml": {"itemsName": "Directory"}},
+ "file_items": {"key": "FileItems", "type": "[FileItem]", "xml": {"itemsName": "File"}},
+ }
+ _xml_map = {"name": "Entries"}
+
+ def __init__(
+ self, *, directory_items: List["_models.DirectoryItem"], file_items: List["_models.FileItem"], **kwargs: Any
+ ) -> None:
+ """
+ :keyword directory_items: Required.
+ :paramtype directory_items: list[~azure.storage.fileshare.models.DirectoryItem]
+ :keyword file_items: Required.
+ :paramtype file_items: list[~azure.storage.fileshare.models.FileItem]
+ """
+ super().__init__(**kwargs)
+ self.directory_items = directory_items
+ self.file_items = file_items
+
+
+class HandleItem(_serialization.Model):
+ """A listed Azure Storage handle item.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar handle_id: XSMB service handle ID. Required.
+ :vartype handle_id: str
+ :ivar path: Required.
+ :vartype path: ~azure.storage.fileshare.models.StringEncoded
+ :ivar file_id: FileId uniquely identifies the file or directory. Required.
+ :vartype file_id: str
+ :ivar parent_id: ParentId uniquely identifies the parent directory of the object.
+ :vartype parent_id: str
+ :ivar session_id: SMB session ID in context of which the file handle was opened. Required.
+ :vartype session_id: str
+ :ivar client_ip: Client IP that opened the handle. Required.
+ :vartype client_ip: str
+ :ivar client_name: Name of the client machine where the share is being mounted. Required.
+ :vartype client_name: str
+ :ivar open_time: Time when the session that previously opened the handle has last been
+ reconnected. (UTC). Required.
+ :vartype open_time: ~datetime.datetime
+ :ivar last_reconnect_time: Time handle was last connected to (UTC).
+ :vartype last_reconnect_time: ~datetime.datetime
+ :ivar access_right_list:
+ :vartype access_right_list: list[str or ~azure.storage.fileshare.models.AccessRight]
+ """
+
+ _validation = {
+ "handle_id": {"required": True},
+ "path": {"required": True},
+ "file_id": {"required": True},
+ "session_id": {"required": True},
+ "client_ip": {"required": True},
+ "client_name": {"required": True},
+ "open_time": {"required": True},
+ }
+
+ _attribute_map = {
+ "handle_id": {"key": "HandleId", "type": "str"},
+ "path": {"key": "Path", "type": "StringEncoded"},
+ "file_id": {"key": "FileId", "type": "str"},
+ "parent_id": {"key": "ParentId", "type": "str"},
+ "session_id": {"key": "SessionId", "type": "str"},
+ "client_ip": {"key": "ClientIp", "type": "str"},
+ "client_name": {"key": "ClientName", "type": "str"},
+ "open_time": {"key": "OpenTime", "type": "rfc-1123"},
+ "last_reconnect_time": {"key": "LastReconnectTime", "type": "rfc-1123"},
+ "access_right_list": {"key": "AccessRightList", "type": "[str]", "xml": {"wrapped": True}},
+ }
+ _xml_map = {"name": "Handle"}
+
+ def __init__(
+ self,
+ *,
+ handle_id: str,
+ path: "_models.StringEncoded",
+ file_id: str,
+ session_id: str,
+ client_ip: str,
+ client_name: str,
+ open_time: datetime.datetime,
+ parent_id: Optional[str] = None,
+ last_reconnect_time: Optional[datetime.datetime] = None,
+ access_right_list: Optional[List[Union[str, "_models.AccessRight"]]] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword handle_id: XSMB service handle ID. Required.
+ :paramtype handle_id: str
+ :keyword path: Required.
+ :paramtype path: ~azure.storage.fileshare.models.StringEncoded
+ :keyword file_id: FileId uniquely identifies the file or directory. Required.
+ :paramtype file_id: str
+ :keyword parent_id: ParentId uniquely identifies the parent directory of the object.
+ :paramtype parent_id: str
+ :keyword session_id: SMB session ID in context of which the file handle was opened. Required.
+ :paramtype session_id: str
+ :keyword client_ip: Client IP that opened the handle. Required.
+ :paramtype client_ip: str
+ :keyword client_name: Name of the client machine where the share is being mounted. Required.
+ :paramtype client_name: str
+ :keyword open_time: Time when the session that previously opened the handle has last been
+ reconnected. (UTC). Required.
+ :paramtype open_time: ~datetime.datetime
+ :keyword last_reconnect_time: Time handle was last connected to (UTC).
+ :paramtype last_reconnect_time: ~datetime.datetime
+ :keyword access_right_list:
+ :paramtype access_right_list: list[str or ~azure.storage.fileshare.models.AccessRight]
+ """
+ super().__init__(**kwargs)
+ self.handle_id = handle_id
+ self.path = path
+ self.file_id = file_id
+ self.parent_id = parent_id
+ self.session_id = session_id
+ self.client_ip = client_ip
+ self.client_name = client_name
+ self.open_time = open_time
+ self.last_reconnect_time = last_reconnect_time
+ self.access_right_list = access_right_list
+
+
+class LeaseAccessConditions(_serialization.Model):
+ """Parameter group.
+
+ :ivar lease_id: If specified, the operation only succeeds if the resource's lease is active and
+ matches this ID.
+ :vartype lease_id: str
+ """
+
+ _attribute_map = {
+ "lease_id": {"key": "leaseId", "type": "str"},
+ }
+
+ def __init__(self, *, lease_id: Optional[str] = None, **kwargs: Any) -> None:
+ """
+ :keyword lease_id: If specified, the operation only succeeds if the resource's lease is active
+ and matches this ID.
+ :paramtype lease_id: str
+ """
+ super().__init__(**kwargs)
+ self.lease_id = lease_id
+
+
+class ListFilesAndDirectoriesSegmentResponse(_serialization.Model):
+ """An enumeration of directories and files.
+
+ All required parameters must be populated in order to send to server.
+
+ :ivar service_endpoint: Required.
+ :vartype service_endpoint: str
+ :ivar share_name: Required.
+ :vartype share_name: str
+ :ivar share_snapshot:
+ :vartype share_snapshot: str
+ :ivar encoded:
+ :vartype encoded: bool
+ :ivar directory_path: Required.
+ :vartype directory_path: str
+ :ivar prefix: Required.
+ :vartype prefix: ~azure.storage.fileshare.models.StringEncoded
+ :ivar marker:
+ :vartype marker: str
+ :ivar max_results:
+ :vartype max_results: int
+ :ivar segment: Abstract for entries that can be listed from Directory. Required.
+ :vartype segment: ~azure.storage.fileshare.models.FilesAndDirectoriesListSegment
+ :ivar next_marker: Required.
+ :vartype next_marker: str
+ :ivar directory_id:
+ :vartype directory_id: str
+ """
+
+ _validation = {
+ "service_endpoint": {"required": True},
+ "share_name": {"required": True},
+ "directory_path": {"required": True},
+ "prefix": {"required": True},
+ "segment": {"required": True},
+ "next_marker": {"required": True},
+ }
+
+ _attribute_map = {
+ "service_endpoint": {"key": "ServiceEndpoint", "type": "str", "xml": {"attr": True}},
+ "share_name": {"key": "ShareName", "type": "str", "xml": {"attr": True}},
+ "share_snapshot": {"key": "ShareSnapshot", "type": "str", "xml": {"attr": True}},
+ "encoded": {"key": "Encoded", "type": "bool", "xml": {"attr": True}},
+ "directory_path": {"key": "DirectoryPath", "type": "str", "xml": {"attr": True}},
+ "prefix": {"key": "Prefix", "type": "StringEncoded"},
+ "marker": {"key": "Marker", "type": "str"},
+ "max_results": {"key": "MaxResults", "type": "int"},
+ "segment": {"key": "Segment", "type": "FilesAndDirectoriesListSegment"},
+ "next_marker": {"key": "NextMarker", "type": "str"},
+ "directory_id": {"key": "DirectoryId", "type": "str"},
+ }
+ _xml_map = {"name": "EnumerationResults"}
+
+ def __init__(
+ self,
+ *,
+ service_endpoint: str,
+ share_name: str,
+ directory_path: str,
+ prefix: "_models.StringEncoded",
+ segment: "_models.FilesAndDirectoriesListSegment",
+ next_marker: str,
+ share_snapshot: Optional[str] = None,
+ encoded: Optional[bool] = None,
+ marker: Optional[str] = None,
+ max_results: Optional[int] = None,
+ directory_id: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword service_endpoint: Required.
+ :paramtype service_endpoint: str
+ :keyword share_name: Required.
+ :paramtype share_name: str
+ :keyword share_snapshot:
+ :paramtype share_snapshot: str
+ :keyword encoded:
+ :paramtype encoded: bool
+ :keyword directory_path: Required.
+ :paramtype directory_path: str
+ :keyword prefix: Required.
+ :paramtype prefix: ~azure.storage.fileshare.models.StringEncoded
+ :keyword marker:
+ :paramtype marker: str
+ :keyword max_results:
+ :paramtype max_results: int
+ :keyword segment: Abstract for entries that can be listed from Directory. Required.
+ :paramtype segment: ~azure.storage.fileshare.models.FilesAndDirectoriesListSegment
+ :keyword next_marker: Required.
+ :paramtype next_marker: str
+ :keyword directory_id:
+ :paramtype directory_id: str
+ """
+ super().__init__(**kwargs)
+ self.service_endpoint = service_endpoint
+ self.share_name = share_name
+ self.share_snapshot = share_snapshot
+ self.encoded = encoded
+ self.directory_path = directory_path
+ self.prefix = prefix
+ self.marker = marker
+ self.max_results = max_results
+ self.segment = segment
+ self.next_marker = next_marker
+ self.directory_id = directory_id
+
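+# Illustrative note (hand-written, not AutoRest output): for ListFilesAndDirectoriesSegmentResponse
+# above, _xml_map names the XML root element (EnumerationResults), and the _attribute_map entries
+# whose xml metadata carries {"attr": True} (ServiceEndpoint, ShareName, ShareSnapshot, Encoded,
+# DirectoryPath) are read from attributes of that element, while Prefix, Segment and the remaining
+# keys are deserialized from child elements.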
+
+class ListHandlesResponse(_serialization.Model):
+ """An enumeration of handles.
+
+    All required parameters must be populated in order to send to the server.
+
+ :ivar handle_list:
+ :vartype handle_list: list[~azure.storage.fileshare.models.HandleItem]
+ :ivar next_marker: Required.
+ :vartype next_marker: str
+ """
+
+ _validation = {
+ "next_marker": {"required": True},
+ }
+
+ _attribute_map = {
+ "handle_list": {
+ "key": "HandleList",
+ "type": "[HandleItem]",
+ "xml": {"name": "Entries", "wrapped": True, "itemsName": "Handle"},
+ },
+ "next_marker": {"key": "NextMarker", "type": "str"},
+ }
+ _xml_map = {"name": "EnumerationResults"}
+
+ def __init__(
+ self, *, next_marker: str, handle_list: Optional[List["_models.HandleItem"]] = None, **kwargs: Any
+ ) -> None:
+ """
+ :keyword handle_list:
+ :paramtype handle_list: list[~azure.storage.fileshare.models.HandleItem]
+ :keyword next_marker: Required.
+ :paramtype next_marker: str
+ """
+ super().__init__(**kwargs)
+ self.handle_list = handle_list
+ self.next_marker = next_marker
+
+
+class ListSharesResponse(_serialization.Model):
+ """An enumeration of shares.
+
+    All required parameters must be populated in order to send to the server.
+
+ :ivar service_endpoint: Required.
+ :vartype service_endpoint: str
+ :ivar prefix:
+ :vartype prefix: str
+ :ivar marker:
+ :vartype marker: str
+ :ivar max_results:
+ :vartype max_results: int
+ :ivar share_items:
+ :vartype share_items: list[~azure.storage.fileshare.models.ShareItemInternal]
+ :ivar next_marker: Required.
+ :vartype next_marker: str
+ """
+
+ _validation = {
+ "service_endpoint": {"required": True},
+ "next_marker": {"required": True},
+ }
+
+ _attribute_map = {
+ "service_endpoint": {"key": "ServiceEndpoint", "type": "str", "xml": {"attr": True}},
+ "prefix": {"key": "Prefix", "type": "str"},
+ "marker": {"key": "Marker", "type": "str"},
+ "max_results": {"key": "MaxResults", "type": "int"},
+ "share_items": {
+ "key": "ShareItems",
+ "type": "[ShareItemInternal]",
+ "xml": {"name": "Shares", "wrapped": True, "itemsName": "Share"},
+ },
+ "next_marker": {"key": "NextMarker", "type": "str"},
+ }
+ _xml_map = {"name": "EnumerationResults"}
+
+ def __init__(
+ self,
+ *,
+ service_endpoint: str,
+ next_marker: str,
+ prefix: Optional[str] = None,
+ marker: Optional[str] = None,
+ max_results: Optional[int] = None,
+ share_items: Optional[List["_models.ShareItemInternal"]] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword service_endpoint: Required.
+ :paramtype service_endpoint: str
+ :keyword prefix:
+ :paramtype prefix: str
+ :keyword marker:
+ :paramtype marker: str
+ :keyword max_results:
+ :paramtype max_results: int
+ :keyword share_items:
+ :paramtype share_items: list[~azure.storage.fileshare.models.ShareItemInternal]
+ :keyword next_marker: Required.
+ :paramtype next_marker: str
+ """
+ super().__init__(**kwargs)
+ self.service_endpoint = service_endpoint
+ self.prefix = prefix
+ self.marker = marker
+ self.max_results = max_results
+ self.share_items = share_items
+ self.next_marker = next_marker
+
+
+class Metrics(_serialization.Model):
+ """Storage Analytics metrics for file service.
+
+    All required parameters must be populated in order to send to the server.
+
+ :ivar version: The version of Storage Analytics to configure. Required.
+ :vartype version: str
+ :ivar enabled: Indicates whether metrics are enabled for the File service. Required.
+ :vartype enabled: bool
+ :ivar include_apis: Indicates whether metrics should generate summary statistics for called API
+ operations.
+ :vartype include_apis: bool
+ :ivar retention_policy: The retention policy.
+ :vartype retention_policy: ~azure.storage.fileshare.models.RetentionPolicy
+ """
+
+ _validation = {
+ "version": {"required": True},
+ "enabled": {"required": True},
+ }
+
+ _attribute_map = {
+ "version": {"key": "Version", "type": "str"},
+ "enabled": {"key": "Enabled", "type": "bool"},
+ "include_apis": {"key": "IncludeAPIs", "type": "bool"},
+ "retention_policy": {"key": "RetentionPolicy", "type": "RetentionPolicy"},
+ }
+
+ def __init__(
+ self,
+ *,
+ version: str,
+ enabled: bool,
+ include_apis: Optional[bool] = None,
+ retention_policy: Optional["_models.RetentionPolicy"] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword version: The version of Storage Analytics to configure. Required.
+ :paramtype version: str
+ :keyword enabled: Indicates whether metrics are enabled for the File service. Required.
+ :paramtype enabled: bool
+ :keyword include_apis: Indicates whether metrics should generate summary statistics for called
+ API operations.
+ :paramtype include_apis: bool
+ :keyword retention_policy: The retention policy.
+ :paramtype retention_policy: ~azure.storage.fileshare.models.RetentionPolicy
+ """
+ super().__init__(**kwargs)
+ self.version = version
+ self.enabled = enabled
+ self.include_apis = include_apis
+ self.retention_policy = retention_policy
+
+
+class RetentionPolicy(_serialization.Model):
+ """The retention policy.
+
+    All required parameters must be populated in order to send to the server.
+
+ :ivar enabled: Indicates whether a retention policy is enabled for the File service. If false,
+ metrics data is retained, and the user is responsible for deleting it. Required.
+ :vartype enabled: bool
+ :ivar days: Indicates the number of days that metrics data should be retained. All data older
+ than this value will be deleted. Metrics data is deleted on a best-effort basis after the
+ retention period expires.
+ :vartype days: int
+ """
+
+ _validation = {
+ "enabled": {"required": True},
+ "days": {"maximum": 365, "minimum": 1},
+ }
+
+ _attribute_map = {
+ "enabled": {"key": "Enabled", "type": "bool"},
+ "days": {"key": "Days", "type": "int"},
+ }
+
+ def __init__(self, *, enabled: bool, days: Optional[int] = None, **kwargs: Any) -> None:
+ """
+ :keyword enabled: Indicates whether a retention policy is enabled for the File service. If
+ false, metrics data is retained, and the user is responsible for deleting it. Required.
+ :paramtype enabled: bool
+ :keyword days: Indicates the number of days that metrics data should be retained. All data
+ older than this value will be deleted. Metrics data is deleted on a best-effort basis after the
+ retention period expires.
+ :paramtype days: int
+ """
+ super().__init__(**kwargs)
+ self.enabled = enabled
+ self.days = days
+
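+# Illustrative usage (hand-written sketch, not AutoRest output): Metrics and RetentionPolicy are
+# typically composed when configuring Storage Analytics for the file service, e.g.:
+#
+#     hour_metrics = Metrics(
+#         version="1.0",
+#         enabled=True,
+#         include_apis=True,
+#         retention_policy=RetentionPolicy(enabled=True, days=7),
+#     )
+#
+# Note that RetentionPolicy._validation constrains days to 1-365 when client-side validation is on.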
+
+class ShareFileRangeList(_serialization.Model):
+ """The list of file ranges.
+
+ :ivar ranges:
+ :vartype ranges: list[~azure.storage.fileshare.models.FileRange]
+ :ivar clear_ranges:
+ :vartype clear_ranges: list[~azure.storage.fileshare.models.ClearRange]
+ """
+
+ _attribute_map = {
+ "ranges": {"key": "Ranges", "type": "[FileRange]", "xml": {"itemsName": "Range"}},
+ "clear_ranges": {"key": "ClearRanges", "type": "[ClearRange]", "xml": {"itemsName": "ClearRange"}},
+ }
+
+ def __init__(
+ self,
+ *,
+ ranges: Optional[List["_models.FileRange"]] = None,
+ clear_ranges: Optional[List["_models.ClearRange"]] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword ranges:
+ :paramtype ranges: list[~azure.storage.fileshare.models.FileRange]
+ :keyword clear_ranges:
+ :paramtype clear_ranges: list[~azure.storage.fileshare.models.ClearRange]
+ """
+ super().__init__(**kwargs)
+ self.ranges = ranges
+ self.clear_ranges = clear_ranges
+
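+# Illustrative note (hand-written, not AutoRest output): ShareFileRangeList above is the shape of
+# a range-listing response; Ranges holds the populated ranges, and ClearRanges is expected to hold
+# ranges cleared since a previous snapshot when a range diff is requested.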
+
+class ShareItemInternal(_serialization.Model):
+ """A listed Azure Storage share item.
+
+    All required parameters must be populated in order to send to the server.
+
+ :ivar name: Required.
+ :vartype name: str
+ :ivar snapshot:
+ :vartype snapshot: str
+ :ivar deleted:
+ :vartype deleted: bool
+ :ivar version:
+ :vartype version: str
+ :ivar properties: Properties of a share. Required.
+ :vartype properties: ~azure.storage.fileshare.models.SharePropertiesInternal
+ :ivar metadata: Dictionary of :code:`<string>`.
+ :vartype metadata: dict[str, str]
+ """
+
+ _validation = {
+ "name": {"required": True},
+ "properties": {"required": True},
+ }
+
+ _attribute_map = {
+ "name": {"key": "Name", "type": "str"},
+ "snapshot": {"key": "Snapshot", "type": "str"},
+ "deleted": {"key": "Deleted", "type": "bool"},
+ "version": {"key": "Version", "type": "str"},
+ "properties": {"key": "Properties", "type": "SharePropertiesInternal"},
+ "metadata": {"key": "Metadata", "type": "{str}"},
+ }
+ _xml_map = {"name": "Share"}
+
+ def __init__(
+ self,
+ *,
+ name: str,
+ properties: "_models.SharePropertiesInternal",
+ snapshot: Optional[str] = None,
+ deleted: Optional[bool] = None,
+ version: Optional[str] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword name: Required.
+ :paramtype name: str
+ :keyword snapshot:
+ :paramtype snapshot: str
+ :keyword deleted:
+ :paramtype deleted: bool
+ :keyword version:
+ :paramtype version: str
+ :keyword properties: Properties of a share. Required.
+ :paramtype properties: ~azure.storage.fileshare.models.SharePropertiesInternal
+ :keyword metadata: Dictionary of :code:`<string>`.
+ :paramtype metadata: dict[str, str]
+ """
+ super().__init__(**kwargs)
+ self.name = name
+ self.snapshot = snapshot
+ self.deleted = deleted
+ self.version = version
+ self.properties = properties
+ self.metadata = metadata
+
+
+class SharePermission(_serialization.Model):
+ """A permission (a security descriptor) at the share level.
+
+    All required parameters must be populated in order to send to the server.
+
+ :ivar permission: The permission in the Security Descriptor Definition Language (SDDL).
+ Required.
+ :vartype permission: str
+ :ivar format: Known values are: "Sddl" and "Binary".
+ :vartype format: str or ~azure.storage.fileshare.models.FilePermissionFormat
+ """
+
+ _validation = {
+ "permission": {"required": True},
+ }
+
+ _attribute_map = {
+ "permission": {"key": "permission", "type": "str"},
+ "format": {"key": "format", "type": "str"},
+ }
+
+ def __init__(
+ self, *, permission: str, format: Optional[Union[str, "_models.FilePermissionFormat"]] = None, **kwargs: Any
+ ) -> None:
+ """
+ :keyword permission: The permission in the Security Descriptor Definition Language (SDDL).
+ Required.
+ :paramtype permission: str
+ :keyword format: Known values are: "Sddl" and "Binary".
+ :paramtype format: str or ~azure.storage.fileshare.models.FilePermissionFormat
+ """
+ super().__init__(**kwargs)
+ self.permission = permission
+ self.format = format
+
+
+class SharePropertiesInternal(_serialization.Model):
+ """Properties of a share.
+
+    All required parameters must be populated in order to send to the server.
+
+ :ivar last_modified: Required.
+ :vartype last_modified: ~datetime.datetime
+ :ivar etag: Required.
+ :vartype etag: str
+ :ivar quota: Required.
+ :vartype quota: int
+ :ivar provisioned_iops:
+ :vartype provisioned_iops: int
+ :ivar provisioned_ingress_m_bps:
+ :vartype provisioned_ingress_m_bps: int
+ :ivar provisioned_egress_m_bps:
+ :vartype provisioned_egress_m_bps: int
+ :ivar provisioned_bandwidth_mi_bps:
+ :vartype provisioned_bandwidth_mi_bps: int
+ :ivar next_allowed_quota_downgrade_time:
+ :vartype next_allowed_quota_downgrade_time: ~datetime.datetime
+ :ivar deleted_time:
+ :vartype deleted_time: ~datetime.datetime
+ :ivar remaining_retention_days:
+ :vartype remaining_retention_days: int
+ :ivar access_tier:
+ :vartype access_tier: str
+ :ivar access_tier_change_time:
+ :vartype access_tier_change_time: ~datetime.datetime
+ :ivar access_tier_transition_state:
+ :vartype access_tier_transition_state: str
+ :ivar lease_status: The current lease status of the share. Known values are: "locked" and
+ "unlocked".
+ :vartype lease_status: str or ~azure.storage.fileshare.models.LeaseStatusType
+ :ivar lease_state: Lease state of the share. Known values are: "available", "leased",
+ "expired", "breaking", and "broken".
+ :vartype lease_state: str or ~azure.storage.fileshare.models.LeaseStateType
+ :ivar lease_duration: When a share is leased, specifies whether the lease is of infinite or
+ fixed duration. Known values are: "infinite" and "fixed".
+ :vartype lease_duration: str or ~azure.storage.fileshare.models.LeaseDurationType
+ :ivar enabled_protocols:
+ :vartype enabled_protocols: str
+ :ivar root_squash: Known values are: "NoRootSquash", "RootSquash", and "AllSquash".
+ :vartype root_squash: str or ~azure.storage.fileshare.models.ShareRootSquash
+ :ivar enable_snapshot_virtual_directory_access:
+ :vartype enable_snapshot_virtual_directory_access: bool
+ :ivar paid_bursting_enabled:
+ :vartype paid_bursting_enabled: bool
+ :ivar paid_bursting_max_iops:
+ :vartype paid_bursting_max_iops: int
+ :ivar paid_bursting_max_bandwidth_mibps:
+ :vartype paid_bursting_max_bandwidth_mibps: int
+ :ivar included_burst_iops:
+ :vartype included_burst_iops: int
+ :ivar max_burst_credits_for_iops:
+ :vartype max_burst_credits_for_iops: int
+ :ivar next_allowed_provisioned_iops_downgrade_time:
+ :vartype next_allowed_provisioned_iops_downgrade_time: ~datetime.datetime
+ :ivar next_allowed_provisioned_bandwidth_downgrade_time:
+ :vartype next_allowed_provisioned_bandwidth_downgrade_time: ~datetime.datetime
+ """
+
+ _validation = {
+ "last_modified": {"required": True},
+ "etag": {"required": True},
+ "quota": {"required": True},
+ }
+
+ _attribute_map = {
+ "last_modified": {"key": "Last-Modified", "type": "rfc-1123"},
+ "etag": {"key": "Etag", "type": "str"},
+ "quota": {"key": "Quota", "type": "int"},
+ "provisioned_iops": {"key": "ProvisionedIops", "type": "int"},
+ "provisioned_ingress_m_bps": {"key": "ProvisionedIngressMBps", "type": "int"},
+ "provisioned_egress_m_bps": {"key": "ProvisionedEgressMBps", "type": "int"},
+ "provisioned_bandwidth_mi_bps": {"key": "ProvisionedBandwidthMiBps", "type": "int"},
+ "next_allowed_quota_downgrade_time": {"key": "NextAllowedQuotaDowngradeTime", "type": "rfc-1123"},
+ "deleted_time": {"key": "DeletedTime", "type": "rfc-1123"},
+ "remaining_retention_days": {"key": "RemainingRetentionDays", "type": "int"},
+ "access_tier": {"key": "AccessTier", "type": "str"},
+ "access_tier_change_time": {"key": "AccessTierChangeTime", "type": "rfc-1123"},
+ "access_tier_transition_state": {"key": "AccessTierTransitionState", "type": "str"},
+ "lease_status": {"key": "LeaseStatus", "type": "str"},
+ "lease_state": {"key": "LeaseState", "type": "str"},
+ "lease_duration": {"key": "LeaseDuration", "type": "str"},
+ "enabled_protocols": {"key": "EnabledProtocols", "type": "str"},
+ "root_squash": {"key": "RootSquash", "type": "str"},
+ "enable_snapshot_virtual_directory_access": {"key": "EnableSnapshotVirtualDirectoryAccess", "type": "bool"},
+ "paid_bursting_enabled": {"key": "PaidBurstingEnabled", "type": "bool"},
+ "paid_bursting_max_iops": {"key": "PaidBurstingMaxIops", "type": "int"},
+ "paid_bursting_max_bandwidth_mibps": {"key": "PaidBurstingMaxBandwidthMibps", "type": "int"},
+ "included_burst_iops": {"key": "IncludedBurstIops", "type": "int"},
+ "max_burst_credits_for_iops": {"key": "MaxBurstCreditsForIops", "type": "int"},
+ "next_allowed_provisioned_iops_downgrade_time": {
+ "key": "NextAllowedProvisionedIopsDowngradeTime",
+ "type": "rfc-1123",
+ },
+ "next_allowed_provisioned_bandwidth_downgrade_time": {
+ "key": "NextAllowedProvisionedBandwidthDowngradeTime",
+ "type": "rfc-1123",
+ },
+ }
+
+ def __init__( # pylint: disable=too-many-locals
+ self,
+ *,
+ last_modified: datetime.datetime,
+ etag: str,
+ quota: int,
+ provisioned_iops: Optional[int] = None,
+ provisioned_ingress_m_bps: Optional[int] = None,
+ provisioned_egress_m_bps: Optional[int] = None,
+ provisioned_bandwidth_mi_bps: Optional[int] = None,
+ next_allowed_quota_downgrade_time: Optional[datetime.datetime] = None,
+ deleted_time: Optional[datetime.datetime] = None,
+ remaining_retention_days: Optional[int] = None,
+ access_tier: Optional[str] = None,
+ access_tier_change_time: Optional[datetime.datetime] = None,
+ access_tier_transition_state: Optional[str] = None,
+ lease_status: Optional[Union[str, "_models.LeaseStatusType"]] = None,
+ lease_state: Optional[Union[str, "_models.LeaseStateType"]] = None,
+ lease_duration: Optional[Union[str, "_models.LeaseDurationType"]] = None,
+ enabled_protocols: Optional[str] = None,
+ root_squash: Optional[Union[str, "_models.ShareRootSquash"]] = None,
+ enable_snapshot_virtual_directory_access: Optional[bool] = None,
+ paid_bursting_enabled: Optional[bool] = None,
+ paid_bursting_max_iops: Optional[int] = None,
+ paid_bursting_max_bandwidth_mibps: Optional[int] = None,
+ included_burst_iops: Optional[int] = None,
+ max_burst_credits_for_iops: Optional[int] = None,
+ next_allowed_provisioned_iops_downgrade_time: Optional[datetime.datetime] = None,
+ next_allowed_provisioned_bandwidth_downgrade_time: Optional[datetime.datetime] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword last_modified: Required.
+ :paramtype last_modified: ~datetime.datetime
+ :keyword etag: Required.
+ :paramtype etag: str
+ :keyword quota: Required.
+ :paramtype quota: int
+ :keyword provisioned_iops:
+ :paramtype provisioned_iops: int
+ :keyword provisioned_ingress_m_bps:
+ :paramtype provisioned_ingress_m_bps: int
+ :keyword provisioned_egress_m_bps:
+ :paramtype provisioned_egress_m_bps: int
+ :keyword provisioned_bandwidth_mi_bps:
+ :paramtype provisioned_bandwidth_mi_bps: int
+ :keyword next_allowed_quota_downgrade_time:
+ :paramtype next_allowed_quota_downgrade_time: ~datetime.datetime
+ :keyword deleted_time:
+ :paramtype deleted_time: ~datetime.datetime
+ :keyword remaining_retention_days:
+ :paramtype remaining_retention_days: int
+ :keyword access_tier:
+ :paramtype access_tier: str
+ :keyword access_tier_change_time:
+ :paramtype access_tier_change_time: ~datetime.datetime
+ :keyword access_tier_transition_state:
+ :paramtype access_tier_transition_state: str
+ :keyword lease_status: The current lease status of the share. Known values are: "locked" and
+ "unlocked".
+ :paramtype lease_status: str or ~azure.storage.fileshare.models.LeaseStatusType
+ :keyword lease_state: Lease state of the share. Known values are: "available", "leased",
+ "expired", "breaking", and "broken".
+ :paramtype lease_state: str or ~azure.storage.fileshare.models.LeaseStateType
+ :keyword lease_duration: When a share is leased, specifies whether the lease is of infinite or
+ fixed duration. Known values are: "infinite" and "fixed".
+ :paramtype lease_duration: str or ~azure.storage.fileshare.models.LeaseDurationType
+ :keyword enabled_protocols:
+ :paramtype enabled_protocols: str
+ :keyword root_squash: Known values are: "NoRootSquash", "RootSquash", and "AllSquash".
+ :paramtype root_squash: str or ~azure.storage.fileshare.models.ShareRootSquash
+ :keyword enable_snapshot_virtual_directory_access:
+ :paramtype enable_snapshot_virtual_directory_access: bool
+ :keyword paid_bursting_enabled:
+ :paramtype paid_bursting_enabled: bool
+ :keyword paid_bursting_max_iops:
+ :paramtype paid_bursting_max_iops: int
+ :keyword paid_bursting_max_bandwidth_mibps:
+ :paramtype paid_bursting_max_bandwidth_mibps: int
+ :keyword included_burst_iops:
+ :paramtype included_burst_iops: int
+ :keyword max_burst_credits_for_iops:
+ :paramtype max_burst_credits_for_iops: int
+ :keyword next_allowed_provisioned_iops_downgrade_time:
+ :paramtype next_allowed_provisioned_iops_downgrade_time: ~datetime.datetime
+ :keyword next_allowed_provisioned_bandwidth_downgrade_time:
+ :paramtype next_allowed_provisioned_bandwidth_downgrade_time: ~datetime.datetime
+ """
+ super().__init__(**kwargs)
+ self.last_modified = last_modified
+ self.etag = etag
+ self.quota = quota
+ self.provisioned_iops = provisioned_iops
+ self.provisioned_ingress_m_bps = provisioned_ingress_m_bps
+ self.provisioned_egress_m_bps = provisioned_egress_m_bps
+ self.provisioned_bandwidth_mi_bps = provisioned_bandwidth_mi_bps
+ self.next_allowed_quota_downgrade_time = next_allowed_quota_downgrade_time
+ self.deleted_time = deleted_time
+ self.remaining_retention_days = remaining_retention_days
+ self.access_tier = access_tier
+ self.access_tier_change_time = access_tier_change_time
+ self.access_tier_transition_state = access_tier_transition_state
+ self.lease_status = lease_status
+ self.lease_state = lease_state
+ self.lease_duration = lease_duration
+ self.enabled_protocols = enabled_protocols
+ self.root_squash = root_squash
+ self.enable_snapshot_virtual_directory_access = enable_snapshot_virtual_directory_access
+ self.paid_bursting_enabled = paid_bursting_enabled
+ self.paid_bursting_max_iops = paid_bursting_max_iops
+ self.paid_bursting_max_bandwidth_mibps = paid_bursting_max_bandwidth_mibps
+ self.included_burst_iops = included_burst_iops
+ self.max_burst_credits_for_iops = max_burst_credits_for_iops
+ self.next_allowed_provisioned_iops_downgrade_time = next_allowed_provisioned_iops_downgrade_time
+ self.next_allowed_provisioned_bandwidth_downgrade_time = next_allowed_provisioned_bandwidth_downgrade_time
+
+
+class ShareProtocolSettings(_serialization.Model):
+ """Protocol settings.
+
+ :ivar smb: Settings for SMB protocol.
+ :vartype smb: ~azure.storage.fileshare.models.ShareSmbSettings
+ """
+
+ _attribute_map = {
+ "smb": {"key": "Smb", "type": "ShareSmbSettings"},
+ }
+ _xml_map = {"name": "ProtocolSettings"}
+
+ def __init__(self, *, smb: Optional["_models.ShareSmbSettings"] = None, **kwargs: Any) -> None:
+ """
+ :keyword smb: Settings for SMB protocol.
+ :paramtype smb: ~azure.storage.fileshare.models.ShareSmbSettings
+ """
+ super().__init__(**kwargs)
+ self.smb = smb
+
+
+class ShareSmbSettings(_serialization.Model):
+ """Settings for SMB protocol.
+
+ :ivar multichannel: Settings for SMB Multichannel.
+ :vartype multichannel: ~azure.storage.fileshare.models.SmbMultichannel
+ """
+
+ _attribute_map = {
+ "multichannel": {"key": "Multichannel", "type": "SmbMultichannel"},
+ }
+ _xml_map = {"name": "SMB"}
+
+ def __init__(self, *, multichannel: Optional["_models.SmbMultichannel"] = None, **kwargs: Any) -> None:
+ """
+ :keyword multichannel: Settings for SMB Multichannel.
+ :paramtype multichannel: ~azure.storage.fileshare.models.SmbMultichannel
+ """
+ super().__init__(**kwargs)
+ self.multichannel = multichannel
+
+
+class ShareStats(_serialization.Model):
+ """Stats for the share.
+
+    All required parameters must be populated in order to send to the server.
+
+ :ivar share_usage_bytes: The approximate size of the data stored in bytes. Note that this value
+ may not include all recently created or recently resized files. Required.
+ :vartype share_usage_bytes: int
+ """
+
+ _validation = {
+ "share_usage_bytes": {"required": True},
+ }
+
+ _attribute_map = {
+ "share_usage_bytes": {"key": "ShareUsageBytes", "type": "int"},
+ }
+
+ def __init__(self, *, share_usage_bytes: int, **kwargs: Any) -> None:
+ """
+ :keyword share_usage_bytes: The approximate size of the data stored in bytes. Note that this
+ value may not include all recently created or recently resized files. Required.
+ :paramtype share_usage_bytes: int
+ """
+ super().__init__(**kwargs)
+ self.share_usage_bytes = share_usage_bytes
+
+
+class SignedIdentifier(_serialization.Model):
+ """Signed identifier.
+
+    All required parameters must be populated in order to send to the server.
+
+ :ivar id: A unique id. Required.
+ :vartype id: str
+ :ivar access_policy: The access policy.
+ :vartype access_policy: ~azure.storage.fileshare.models.AccessPolicy
+ """
+
+ _validation = {
+ "id": {"required": True},
+ }
+
+ _attribute_map = {
+ "id": {"key": "Id", "type": "str"},
+ "access_policy": {"key": "AccessPolicy", "type": "AccessPolicy"},
+ }
+
+ def __init__(
+ self,
+ *,
+ id: str, # pylint: disable=redefined-builtin
+ access_policy: Optional["_models.AccessPolicy"] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword id: A unique id. Required.
+ :paramtype id: str
+ :keyword access_policy: The access policy.
+ :paramtype access_policy: ~azure.storage.fileshare.models.AccessPolicy
+ """
+ super().__init__(**kwargs)
+ self.id = id
+ self.access_policy = access_policy
+
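+# Illustrative usage (hand-written sketch, not AutoRest output): a list of SignedIdentifier models
+# is typically what a share's set-access-policy operation serializes as its XML body, pairing a
+# stored access policy ID with an optional AccessPolicy, e.g.:
+#
+#     identifiers = [SignedIdentifier(id="read-only-policy", access_policy=None)]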
+
+class SmbMultichannel(_serialization.Model):
+ """Settings for SMB multichannel.
+
+    :ivar enabled: Whether SMB multichannel is enabled.
+ :vartype enabled: bool
+ """
+
+ _attribute_map = {
+ "enabled": {"key": "Enabled", "type": "bool"},
+ }
+ _xml_map = {"name": "Multichannel"}
+
+ def __init__(self, *, enabled: Optional[bool] = None, **kwargs: Any) -> None:
+ """
+        :keyword enabled: Whether SMB multichannel is enabled.
+ :paramtype enabled: bool
+ """
+ super().__init__(**kwargs)
+ self.enabled = enabled
+
+
+class SourceLeaseAccessConditions(_serialization.Model):
+ """Parameter group.
+
+ :ivar source_lease_id: Required if the source file has an active infinite lease.
+ :vartype source_lease_id: str
+ """
+
+ _attribute_map = {
+ "source_lease_id": {"key": "sourceLeaseId", "type": "str"},
+ }
+
+ def __init__(self, *, source_lease_id: Optional[str] = None, **kwargs: Any) -> None:
+ """
+ :keyword source_lease_id: Required if the source file has an active infinite lease.
+ :paramtype source_lease_id: str
+ """
+ super().__init__(**kwargs)
+ self.source_lease_id = source_lease_id
+
+
+class SourceModifiedAccessConditions(_serialization.Model):
+ """Parameter group.
+
+    :ivar source_if_match_crc64: Specify the crc64 value to operate only on a range with a
+     matching crc64 checksum.
+    :vartype source_if_match_crc64: bytes
+    :ivar source_if_none_match_crc64: Specify the crc64 value to operate only on a range without
+     a matching crc64 checksum.
+    :vartype source_if_none_match_crc64: bytes
+ """
+
+ _attribute_map = {
+ "source_if_match_crc64": {"key": "sourceIfMatchCrc64", "type": "bytearray"},
+ "source_if_none_match_crc64": {"key": "sourceIfNoneMatchCrc64", "type": "bytearray"},
+ }
+
+ def __init__(
+ self,
+ *,
+ source_if_match_crc64: Optional[bytes] = None,
+ source_if_none_match_crc64: Optional[bytes] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+        :keyword source_if_match_crc64: Specify the crc64 value to operate only on a range with
+         a matching crc64 checksum.
+        :paramtype source_if_match_crc64: bytes
+        :keyword source_if_none_match_crc64: Specify the crc64 value to operate only on a range
+         without a matching crc64 checksum.
+        :paramtype source_if_none_match_crc64: bytes
+ """
+ super().__init__(**kwargs)
+ self.source_if_match_crc64 = source_if_match_crc64
+ self.source_if_none_match_crc64 = source_if_none_match_crc64
+
+
+class StorageError(_serialization.Model):
+ """StorageError.
+
+ :ivar message:
+ :vartype message: str
+ :ivar authentication_error_detail:
+ :vartype authentication_error_detail: str
+ """
+
+ _attribute_map = {
+ "message": {"key": "Message", "type": "str"},
+ "authentication_error_detail": {"key": "AuthenticationErrorDetail", "type": "str"},
+ }
+
+ def __init__(
+ self, *, message: Optional[str] = None, authentication_error_detail: Optional[str] = None, **kwargs: Any
+ ) -> None:
+ """
+ :keyword message:
+ :paramtype message: str
+ :keyword authentication_error_detail:
+ :paramtype authentication_error_detail: str
+ """
+ super().__init__(**kwargs)
+ self.message = message
+ self.authentication_error_detail = authentication_error_detail
+
+
+class StorageServiceProperties(_serialization.Model):
+ """Storage service properties.
+
+ :ivar hour_metrics: A summary of request statistics grouped by API in hourly aggregates for
+ files.
+ :vartype hour_metrics: ~azure.storage.fileshare.models.Metrics
+ :ivar minute_metrics: A summary of request statistics grouped by API in minute aggregates for
+ files.
+ :vartype minute_metrics: ~azure.storage.fileshare.models.Metrics
+ :ivar cors: The set of CORS rules.
+ :vartype cors: list[~azure.storage.fileshare.models.CorsRule]
+ :ivar protocol: Protocol settings.
+ :vartype protocol: ~azure.storage.fileshare.models.ShareProtocolSettings
+ """
+
+ _attribute_map = {
+ "hour_metrics": {"key": "HourMetrics", "type": "Metrics"},
+ "minute_metrics": {"key": "MinuteMetrics", "type": "Metrics"},
+ "cors": {"key": "Cors", "type": "[CorsRule]", "xml": {"wrapped": True}},
+ "protocol": {"key": "Protocol", "type": "ShareProtocolSettings"},
+ }
+
+ def __init__(
+ self,
+ *,
+ hour_metrics: Optional["_models.Metrics"] = None,
+ minute_metrics: Optional["_models.Metrics"] = None,
+ cors: Optional[List["_models.CorsRule"]] = None,
+ protocol: Optional["_models.ShareProtocolSettings"] = None,
+ **kwargs: Any
+ ) -> None:
+ """
+ :keyword hour_metrics: A summary of request statistics grouped by API in hourly aggregates for
+ files.
+ :paramtype hour_metrics: ~azure.storage.fileshare.models.Metrics
+ :keyword minute_metrics: A summary of request statistics grouped by API in minute aggregates
+ for files.
+ :paramtype minute_metrics: ~azure.storage.fileshare.models.Metrics
+ :keyword cors: The set of CORS rules.
+ :paramtype cors: list[~azure.storage.fileshare.models.CorsRule]
+ :keyword protocol: Protocol settings.
+ :paramtype protocol: ~azure.storage.fileshare.models.ShareProtocolSettings
+ """
+ super().__init__(**kwargs)
+ self.hour_metrics = hour_metrics
+ self.minute_metrics = minute_metrics
+ self.cors = cors
+ self.protocol = protocol
+
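+# Illustrative usage (hand-written sketch, not AutoRest output): a complete service-properties
+# payload can be assembled from the models defined above, e.g.:
+#
+#     props = StorageServiceProperties(
+#         hour_metrics=Metrics(version="1.0", enabled=False),
+#         minute_metrics=Metrics(
+#             version="1.0",
+#             enabled=True,
+#             retention_policy=RetentionPolicy(enabled=True, days=3),
+#         ),
+#         protocol=ShareProtocolSettings(
+#             smb=ShareSmbSettings(multichannel=SmbMultichannel(enabled=True)),
+#         ),
+#     )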
+
+class StringEncoded(_serialization.Model):
+ """StringEncoded.
+
+ :ivar encoded:
+ :vartype encoded: bool
+ :ivar content:
+ :vartype content: str
+ """
+
+ _attribute_map = {
+ "encoded": {"key": "Encoded", "type": "bool", "xml": {"name": "Encoded", "attr": True}},
+ "content": {"key": "content", "type": "str", "xml": {"text": True}},
+ }
+
+ def __init__(self, *, encoded: Optional[bool] = None, content: Optional[str] = None, **kwargs: Any) -> None:
+ """
+ :keyword encoded:
+ :paramtype encoded: bool
+ :keyword content:
+ :paramtype content: str
+ """
+ super().__init__(**kwargs)
+ self.encoded = encoded
+ self.content = content
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/models/_patch.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/models/_patch.py
new file mode 100644
index 00000000..f7dd3251
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/models/_patch.py
@@ -0,0 +1,20 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+from typing import List
+
+__all__: List[str] = [] # Add all objects you want publicly available to users at this package level
+
+
+def patch_sdk():
+ """Do not remove from this file.
+
+ `patch_sdk` is a last resort escape hatch that allows you to do customizations
+ you can't accomplish using the techniques described in
+ https://aka.ms/azsdk/python/dpcodegen/python/customize
+ """
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/__init__.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/__init__.py
new file mode 100644
index 00000000..092b7efd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/__init__.py
@@ -0,0 +1,31 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=wrong-import-position
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from ._patch import * # pylint: disable=unused-wildcard-import
+
+from ._service_operations import ServiceOperations # type: ignore
+from ._share_operations import ShareOperations # type: ignore
+from ._directory_operations import DirectoryOperations # type: ignore
+from ._file_operations import FileOperations # type: ignore
+
+from ._patch import __all__ as _patch_all
+from ._patch import *
+from ._patch import patch_sdk as _patch_sdk
+
+__all__ = [
+ "ServiceOperations",
+ "ShareOperations",
+ "DirectoryOperations",
+ "FileOperations",
+]
+__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
+_patch_sdk()
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/_directory_operations.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/_directory_operations.py
new file mode 100644
index 00000000..18e4eabd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/_directory_operations.py
@@ -0,0 +1,1570 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import sys
+from typing import Any, Callable, Dict, List, Literal, Optional, TypeVar, Union
+
+from azure.core import PipelineClient
+from azure.core.exceptions import (
+ ClientAuthenticationError,
+ HttpResponseError,
+ ResourceExistsError,
+ ResourceNotFoundError,
+ ResourceNotModifiedError,
+ map_error,
+)
+from azure.core.pipeline import PipelineResponse
+from azure.core.rest import HttpRequest, HttpResponse
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.utils import case_insensitive_dict
+
+from .. import models as _models
+from .._configuration import AzureFileStorageConfiguration
+from .._serialization import Deserializer, Serializer
+
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+
+
+def build_create_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ file_permission_key: Optional[str] = None,
+ file_attributes: str = "none",
+ file_creation_time: str = "now",
+ file_last_write_time: str = "now",
+ file_change_time: Optional[str] = None,
+ owner: Optional[str] = None,
+ group: Optional[str] = None,
+ file_mode: Optional[str] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ if metadata is not None:
+ _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if file_permission is not None:
+ _headers["x-ms-file-permission"] = _SERIALIZER.header("file_permission", file_permission, "str")
+ if file_permission_format is not None:
+ _headers["x-ms-file-permission-format"] = _SERIALIZER.header(
+ "file_permission_format", file_permission_format, "str"
+ )
+ if file_permission_key is not None:
+ _headers["x-ms-file-permission-key"] = _SERIALIZER.header("file_permission_key", file_permission_key, "str")
+ if file_attributes is not None:
+ _headers["x-ms-file-attributes"] = _SERIALIZER.header("file_attributes", file_attributes, "str")
+ if file_creation_time is not None:
+ _headers["x-ms-file-creation-time"] = _SERIALIZER.header("file_creation_time", file_creation_time, "str")
+ if file_last_write_time is not None:
+ _headers["x-ms-file-last-write-time"] = _SERIALIZER.header("file_last_write_time", file_last_write_time, "str")
+ if file_change_time is not None:
+ _headers["x-ms-file-change-time"] = _SERIALIZER.header("file_change_time", file_change_time, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ if owner is not None:
+ _headers["x-ms-owner"] = _SERIALIZER.header("owner", owner, "str")
+ if group is not None:
+ _headers["x-ms-group"] = _SERIALIZER.header("group", group, "str")
+ if file_mode is not None:
+ _headers["x-ms-mode"] = _SERIALIZER.header("file_mode", file_mode, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
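+# Illustrative usage (hand-written sketch, not AutoRest output): each build_*_request helper only
+# constructs an azure.core.rest.HttpRequest carrying the query parameters and x-ms-* headers shown
+# above; sending it is left to the client's pipeline, e.g.:
+#
+#     request = build_create_request(
+#         url="https://myaccount.file.core.windows.net/myshare/mydir",  # hypothetical URL
+#         timeout=30,
+#         metadata={"project": "docs"},
+#     )
+#     # request.method == "PUT" and its query string includes restype=directory.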
+
+def build_get_properties_request(
+ url: str,
+ *,
+ sharesnapshot: Optional[str] = None,
+ timeout: Optional[int] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ if sharesnapshot is not None:
+ _params["sharesnapshot"] = _SERIALIZER.query("sharesnapshot", sharesnapshot, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_delete_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_set_properties_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ file_permission_key: Optional[str] = None,
+ file_attributes: str = "none",
+ file_creation_time: str = "now",
+ file_last_write_time: str = "now",
+ file_change_time: Optional[str] = None,
+ owner: Optional[str] = None,
+ group: Optional[str] = None,
+ file_mode: Optional[str] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
+ comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if file_permission is not None:
+ _headers["x-ms-file-permission"] = _SERIALIZER.header("file_permission", file_permission, "str")
+ if file_permission_format is not None:
+ _headers["x-ms-file-permission-format"] = _SERIALIZER.header(
+ "file_permission_format", file_permission_format, "str"
+ )
+ if file_permission_key is not None:
+ _headers["x-ms-file-permission-key"] = _SERIALIZER.header("file_permission_key", file_permission_key, "str")
+ if file_attributes is not None:
+ _headers["x-ms-file-attributes"] = _SERIALIZER.header("file_attributes", file_attributes, "str")
+ if file_creation_time is not None:
+ _headers["x-ms-file-creation-time"] = _SERIALIZER.header("file_creation_time", file_creation_time, "str")
+ if file_last_write_time is not None:
+ _headers["x-ms-file-last-write-time"] = _SERIALIZER.header("file_last_write_time", file_last_write_time, "str")
+ if file_change_time is not None:
+ _headers["x-ms-file-change-time"] = _SERIALIZER.header("file_change_time", file_change_time, "str")
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ if owner is not None:
+ _headers["x-ms-owner"] = _SERIALIZER.header("owner", owner, "str")
+ if group is not None:
+ _headers["x-ms-group"] = _SERIALIZER.header("group", group, "str")
+ if file_mode is not None:
+ _headers["x-ms-mode"] = _SERIALIZER.header("file_mode", file_mode, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_set_metadata_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
+ comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ if metadata is not None:
+ _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_list_files_and_directories_segment_request( # pylint: disable=name-too-long
+ url: str,
+ *,
+ prefix: Optional[str] = None,
+ sharesnapshot: Optional[str] = None,
+ marker: Optional[str] = None,
+ maxresults: Optional[int] = None,
+ timeout: Optional[int] = None,
+ include: Optional[List[Union[str, _models.ListFilesIncludeType]]] = None,
+ include_extended_info: Optional[bool] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
+ comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if prefix is not None:
+ _params["prefix"] = _SERIALIZER.query("prefix", prefix, "str")
+ if sharesnapshot is not None:
+ _params["sharesnapshot"] = _SERIALIZER.query("sharesnapshot", sharesnapshot, "str")
+ if marker is not None:
+ _params["marker"] = _SERIALIZER.query("marker", marker, "str")
+ if maxresults is not None:
+ _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1)
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+ if include is not None:
+ _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",")
+
+ # Construct headers
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if include_extended_info is not None:
+ _headers["x-ms-file-extended-info"] = _SERIALIZER.header("include_extended_info", include_extended_info, "bool")
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_list_handles_request(
+ url: str,
+ *,
+ marker: Optional[str] = None,
+ maxresults: Optional[int] = None,
+ timeout: Optional[int] = None,
+ sharesnapshot: Optional[str] = None,
+ recursive: Optional[bool] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["listhandles"] = kwargs.pop("comp", _params.pop("comp", "listhandles"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if marker is not None:
+ _params["marker"] = _SERIALIZER.query("marker", marker, "str")
+ if maxresults is not None:
+ _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1)
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+ if sharesnapshot is not None:
+ _params["sharesnapshot"] = _SERIALIZER.query("sharesnapshot", sharesnapshot, "str")
+
+ # Construct headers
+ if recursive is not None:
+ _headers["x-ms-recursive"] = _SERIALIZER.header("recursive", recursive, "bool")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_force_close_handles_request(
+ url: str,
+ *,
+ handle_id: str,
+ timeout: Optional[int] = None,
+ marker: Optional[str] = None,
+ sharesnapshot: Optional[str] = None,
+ recursive: Optional[bool] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["forceclosehandles"] = kwargs.pop("comp", _params.pop("comp", "forceclosehandles"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+ if marker is not None:
+ _params["marker"] = _SERIALIZER.query("marker", marker, "str")
+ if sharesnapshot is not None:
+ _params["sharesnapshot"] = _SERIALIZER.query("sharesnapshot", sharesnapshot, "str")
+
+ # Construct headers
+ _headers["x-ms-handle-id"] = _SERIALIZER.header("handle_id", handle_id, "str")
+ if recursive is not None:
+ _headers["x-ms-recursive"] = _SERIALIZER.header("recursive", recursive, "bool")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_rename_request(
+ url: str,
+ *,
+ rename_source: str,
+ timeout: Optional[int] = None,
+ replace_if_exists: Optional[bool] = None,
+ ignore_read_only: Optional[bool] = None,
+ source_lease_id: Optional[str] = None,
+ destination_lease_id: Optional[str] = None,
+ file_attributes: Optional[str] = None,
+ file_creation_time: Optional[str] = None,
+ file_last_write_time: Optional[str] = None,
+ file_change_time: Optional[str] = None,
+ file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ file_permission_key: Optional[str] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ allow_source_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
+ comp: Literal["rename"] = kwargs.pop("comp", _params.pop("comp", "rename"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ _headers["x-ms-file-rename-source"] = _SERIALIZER.header("rename_source", rename_source, "str")
+ if replace_if_exists is not None:
+ _headers["x-ms-file-rename-replace-if-exists"] = _SERIALIZER.header(
+ "replace_if_exists", replace_if_exists, "bool"
+ )
+ if ignore_read_only is not None:
+ _headers["x-ms-file-rename-ignore-readonly"] = _SERIALIZER.header("ignore_read_only", ignore_read_only, "bool")
+ if source_lease_id is not None:
+ _headers["x-ms-source-lease-id"] = _SERIALIZER.header("source_lease_id", source_lease_id, "str")
+ if destination_lease_id is not None:
+ _headers["x-ms-destination-lease-id"] = _SERIALIZER.header("destination_lease_id", destination_lease_id, "str")
+ if file_attributes is not None:
+ _headers["x-ms-file-attributes"] = _SERIALIZER.header("file_attributes", file_attributes, "str")
+ if file_creation_time is not None:
+ _headers["x-ms-file-creation-time"] = _SERIALIZER.header("file_creation_time", file_creation_time, "str")
+ if file_last_write_time is not None:
+ _headers["x-ms-file-last-write-time"] = _SERIALIZER.header("file_last_write_time", file_last_write_time, "str")
+ if file_change_time is not None:
+ _headers["x-ms-file-change-time"] = _SERIALIZER.header("file_change_time", file_change_time, "str")
+ if file_permission is not None:
+ _headers["x-ms-file-permission"] = _SERIALIZER.header("file_permission", file_permission, "str")
+ if file_permission_format is not None:
+ _headers["x-ms-file-permission-format"] = _SERIALIZER.header(
+ "file_permission_format", file_permission_format, "str"
+ )
+ if file_permission_key is not None:
+ _headers["x-ms-file-permission-key"] = _SERIALIZER.header("file_permission_key", file_permission_key, "str")
+ if metadata is not None:
+ _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}")
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ if allow_source_trailing_dot is not None:
+ _headers["x-ms-source-allow-trailing-dot"] = _SERIALIZER.header(
+ "allow_source_trailing_dot", allow_source_trailing_dot, "bool"
+ )
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
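+
+
+# Illustrative helper below: a minimal sketch of how the request builders in this
+# module are used; it is not part of the generated surface. The account and
+# directory URLs are placeholders.
+def _example_build_rename_request() -> HttpRequest:
+    request = build_rename_request(
+        url="https://myaccount.file.core.windows.net/myshare/mydir",
+        rename_source="https://myaccount.file.core.windows.net/myshare/olddir",
+        replace_if_exists=False,
+        metadata={"renamed": "true"},
+    )
+    # The builder always issues a PUT with restype=directory&comp=rename and
+    # carries the source path in the x-ms-file-rename-source header.
+    assert request.method == "PUT"
+    return request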
+
+
+class DirectoryOperations:
+ """
+ .. warning::
+ **DO NOT** instantiate this class directly.
+
+ Instead, you should access the following operations through
+ :class:`~azure.storage.fileshare.AzureFileStorage`'s
+ :attr:`directory` attribute.
+ """
+
+ models = _models
+
+ def __init__(self, *args, **kwargs):
+ input_args = list(args)
+ self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
+ self._config: AzureFileStorageConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
+ self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
+ self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+ @distributed_trace
+ def create( # pylint: disable=inconsistent-return-statements
+ self,
+ timeout: Optional[int] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ file_permission_key: Optional[str] = None,
+ file_attributes: str = "none",
+ file_creation_time: str = "now",
+ file_last_write_time: str = "now",
+ file_change_time: Optional[str] = None,
+ owner: Optional[str] = None,
+ group: Optional[str] = None,
+ file_mode: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Creates a new directory under the specified share or parent directory.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param metadata: A name-value pair to associate with a file storage object. Default value is
+ None.
+ :type metadata: dict[str, str]
+ :param file_permission: If specified the permission (security descriptor) shall be set for the
+ directory/file. This header can be used if Permission size is <= 8KB, else
+ x-ms-file-permission-key header shall be used. Default value: Inherit. If SDDL is specified as
+ input, it must have owner, group and dacl. Note: Only one of the x-ms-file-permission or
+ x-ms-file-permission-key should be specified. Default value is "inherit".
+ :type file_permission: str
+ :param file_permission_format: Optional. Available for version 2023-06-01 and later. Specifies
+ the format in which the permission is returned. Acceptable values are SDDL or binary. If
+ x-ms-file-permission-format is unspecified or explicitly set to SDDL, the permission is
+ returned in SDDL format. If x-ms-file-permission-format is explicitly set to binary, the
+ permission is returned as a base64 string representing the binary encoding of the permission.
+ Known values are: "Sddl" and "Binary". Default value is None.
+ :type file_permission_format: str or ~azure.storage.fileshare.models.FilePermissionFormat
+ :param file_permission_key: Key of the permission to be set for the directory/file. Note: Only
+ one of the x-ms-file-permission or x-ms-file-permission-key should be specified. Default value
+ is None.
+ :type file_permission_key: str
+ :param file_attributes: If specified, the provided file attributes shall be set. Default value:
+ ‘Archive’ for file and ‘Directory’ for directory. ‘None’ can also be specified as default.
+ Default value is "none".
+ :type file_attributes: str
+ :param file_creation_time: Creation time for the file/directory. Default value: Now. Default
+ value is "now".
+ :type file_creation_time: str
+ :param file_last_write_time: Last write time for the file/directory. Default value: Now.
+ Default value is "now".
+ :type file_last_write_time: str
+ :param file_change_time: Change time for the file/directory. Default value: Now. Default value
+ is None.
+ :type file_change_time: str
+ :param owner: Optional, NFS only. The owner of the file or directory. Default value is None.
+ :type owner: str
+ :param group: Optional, NFS only. The owning group of the file or directory. Default value is
+ None.
+ :type group: str
+ :param file_mode: Optional, NFS only. The file mode of the file or directory. Default value is
+ None.
+ :type file_mode: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_create_request(
+ url=self._config.url,
+ timeout=timeout,
+ metadata=metadata,
+ file_permission=file_permission,
+ file_permission_format=file_permission_format,
+ file_permission_key=file_permission_key,
+ file_attributes=file_attributes,
+ file_creation_time=file_creation_time,
+ file_last_write_time=file_last_write_time,
+ file_change_time=file_change_time,
+ owner=owner,
+ group=group,
+ file_mode=file_mode,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-request-server-encrypted")
+ )
+ response_headers["x-ms-file-permission-key"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-permission-key")
+ )
+ response_headers["x-ms-file-attributes"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-attributes")
+ )
+ response_headers["x-ms-file-creation-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-creation-time")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-file-change-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-change-time")
+ )
+ response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id"))
+ response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id"))
+ response_headers["x-ms-mode"] = self._deserialize("str", response.headers.get("x-ms-mode"))
+ response_headers["x-ms-owner"] = self._deserialize("str", response.headers.get("x-ms-owner"))
+ response_headers["x-ms-group"] = self._deserialize("str", response.headers.get("x-ms-group"))
+ response_headers["x-ms-file-file-type"] = self._deserialize("str", response.headers.get("x-ms-file-file-type"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
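+    def _example_create_with_metadata(self) -> None:
+        # Illustrative usage sketch, not part of the generated client: creates
+        # the directory addressed by the configured URL with user metadata,
+        # letting the permission and timestamps fall back to the documented
+        # defaults ("inherit" / "now"). The metadata values are placeholders.
+        self.create(
+            timeout=30,
+            metadata={"department": "finance"},
+        )
+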
+ @distributed_trace
+ def get_properties( # pylint: disable=inconsistent-return-statements
+ self, sharesnapshot: Optional[str] = None, timeout: Optional[int] = None, **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Returns all system properties for the specified directory, and can also be used to check the
+ existence of a directory. The data returned does not include the files in the directory or any
+ subdirectories.
+
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_get_properties_request(
+ url=self._config.url,
+ sharesnapshot=sharesnapshot,
+ timeout=timeout,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta"))
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-server-encrypted")
+ )
+ response_headers["x-ms-file-attributes"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-attributes")
+ )
+ response_headers["x-ms-file-creation-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-creation-time")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-file-change-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-change-time")
+ )
+ response_headers["x-ms-file-permission-key"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-permission-key")
+ )
+ response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id"))
+ response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id"))
+ response_headers["x-ms-mode"] = self._deserialize("str", response.headers.get("x-ms-mode"))
+ response_headers["x-ms-owner"] = self._deserialize("str", response.headers.get("x-ms-owner"))
+ response_headers["x-ms-group"] = self._deserialize("str", response.headers.get("x-ms-group"))
+ response_headers["x-ms-file-file-type"] = self._deserialize("str", response.headers.get("x-ms-file-file-type"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
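+    def _example_read_directory_properties(self) -> Dict[str, Any]:
+        # Illustrative usage sketch, not part of the generated client: the
+        # operation itself returns None, so a ``cls`` callback is used here to
+        # surface the deserialized response headers (ETag, x-ms-file-id, ...).
+        # The callback shape mirrors how this class invokes it:
+        # cls(pipeline_response, deserialized, response_headers).
+        return self.get_properties(
+            cls=lambda pipeline_response, deserialized, response_headers: response_headers,
+        )
+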
+ @distributed_trace
+ def delete( # pylint: disable=inconsistent-return-statements
+ self, timeout: Optional[int] = None, **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Removes the specified empty directory. Note that the directory must be empty before it can be
+ deleted.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_delete_request(
+ url=self._config.url,
+ timeout=timeout,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace
+ def set_properties( # pylint: disable=inconsistent-return-statements
+ self,
+ timeout: Optional[int] = None,
+ file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ file_permission_key: Optional[str] = None,
+ file_attributes: str = "none",
+ file_creation_time: str = "now",
+ file_last_write_time: str = "now",
+ file_change_time: Optional[str] = None,
+ owner: Optional[str] = None,
+ group: Optional[str] = None,
+ file_mode: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Sets properties on the directory.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param file_permission: If specified the permission (security descriptor) shall be set for the
+ directory/file. This header can be used if Permission size is <= 8KB, else
+ x-ms-file-permission-key header shall be used. Default value: Inherit. If SDDL is specified as
+ input, it must have owner, group and dacl. Note: Only one of the x-ms-file-permission or
+ x-ms-file-permission-key should be specified. Default value is "inherit".
+ :type file_permission: str
+ :param file_permission_format: Optional. Available for version 2023-06-01 and later. Specifies
+ the format in which the permission is returned. Acceptable values are SDDL or binary. If
+ x-ms-file-permission-format is unspecified or explicitly set to SDDL, the permission is
+ returned in SDDL format. If x-ms-file-permission-format is explicitly set to binary, the
+ permission is returned as a base64 string representing the binary encoding of the permission.
+ Known values are: "Sddl" and "Binary". Default value is None.
+ :type file_permission_format: str or ~azure.storage.fileshare.models.FilePermissionFormat
+ :param file_permission_key: Key of the permission to be set for the directory/file. Note: Only
+ one of the x-ms-file-permission or x-ms-file-permission-key should be specified. Default value
+ is None.
+ :type file_permission_key: str
+ :param file_attributes: If specified, the provided file attributes shall be set. Default value:
+ ‘Archive’ for file and ‘Directory’ for directory. ‘None’ can also be specified as default.
+ Default value is "none".
+ :type file_attributes: str
+ :param file_creation_time: Creation time for the file/directory. Default value: Now. Default
+ value is "now".
+ :type file_creation_time: str
+ :param file_last_write_time: Last write time for the file/directory. Default value: Now.
+ Default value is "now".
+ :type file_last_write_time: str
+ :param file_change_time: Change time for the file/directory. Default value: Now. Default value
+ is None.
+ :type file_change_time: str
+ :param owner: Optional, NFS only. The owner of the file or directory. Default value is None.
+ :type owner: str
+ :param group: Optional, NFS only. The owning group of the file or directory. Default value is
+ None.
+ :type group: str
+ :param file_mode: Optional, NFS only. The file mode of the file or directory. Default value is
+ None.
+ :type file_mode: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
+ comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_set_properties_request(
+ url=self._config.url,
+ timeout=timeout,
+ file_permission=file_permission,
+ file_permission_format=file_permission_format,
+ file_permission_key=file_permission_key,
+ file_attributes=file_attributes,
+ file_creation_time=file_creation_time,
+ file_last_write_time=file_last_write_time,
+ file_change_time=file_change_time,
+ owner=owner,
+ group=group,
+ file_mode=file_mode,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-request-server-encrypted")
+ )
+ response_headers["x-ms-file-permission-key"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-permission-key")
+ )
+ response_headers["x-ms-file-attributes"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-attributes")
+ )
+ response_headers["x-ms-file-creation-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-creation-time")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-file-change-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-change-time")
+ )
+ response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id"))
+ response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id"))
+ response_headers["x-ms-mode"] = self._deserialize("str", response.headers.get("x-ms-mode"))
+ response_headers["x-ms-owner"] = self._deserialize("str", response.headers.get("x-ms-owner"))
+ response_headers["x-ms-group"] = self._deserialize("str", response.headers.get("x-ms-group"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
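+    def _example_set_nfs_ownership(self) -> None:
+        # Illustrative usage sketch, not part of the generated client: sets the
+        # NFS-only owner/group/mode on the directory while leaving the SMB
+        # permission and timestamps at their documented defaults. The numeric
+        # IDs and the mode string are placeholders.
+        self.set_properties(
+            owner="1000",
+            group="1000",
+            file_mode="0755",
+        )
+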
+ @distributed_trace
+ def set_metadata( # pylint: disable=inconsistent-return-statements
+ self, timeout: Optional[int] = None, metadata: Optional[Dict[str, str]] = None, **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Updates user defined metadata for the specified directory.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param metadata: A name-value pair to associate with a file storage object. Default value is
+ None.
+ :type metadata: dict[str, str]
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
+ comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_set_metadata_request(
+ url=self._config.url,
+ timeout=timeout,
+ metadata=metadata,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-request-server-encrypted")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace
+ def list_files_and_directories_segment(
+ self,
+ prefix: Optional[str] = None,
+ sharesnapshot: Optional[str] = None,
+ marker: Optional[str] = None,
+ maxresults: Optional[int] = None,
+ timeout: Optional[int] = None,
+ include: Optional[List[Union[str, _models.ListFilesIncludeType]]] = None,
+ include_extended_info: Optional[bool] = None,
+ **kwargs: Any
+ ) -> _models.ListFilesAndDirectoriesSegmentResponse:
+ # pylint: disable=line-too-long
+ """Returns a list of files or directories under the specified share or directory. It lists the
+ contents only for a single level of the directory hierarchy.
+
+ :param prefix: Filters the results to return only entries whose name begins with the specified
+ prefix. Default value is None.
+ :type prefix: str
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param marker: A string value that identifies the portion of the list to be returned with the
+ next list operation. The operation returns a marker value within the response body if the list
+ returned was not complete. The marker value may then be used in a subsequent call to request
+ the next set of list items. The marker value is opaque to the client. Default value is None.
+ :type marker: str
+ :param maxresults: Specifies the maximum number of entries to return. If the request does not
+ specify maxresults, or specifies a value greater than 5,000, the server will return up to 5,000
+ items. Default value is None.
+ :type maxresults: int
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param include: Include this parameter to specify one or more datasets to include in the
+ response. Default value is None.
+ :type include: list[str or ~azure.storage.fileshare.models.ListFilesIncludeType]
+ :param include_extended_info: Include extended information. Default value is None.
+ :type include_extended_info: bool
+ :return: ListFilesAndDirectoriesSegmentResponse or the result of cls(response)
+ :rtype: ~azure.storage.fileshare.models.ListFilesAndDirectoriesSegmentResponse
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
+ comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list"))
+ cls: ClsType[_models.ListFilesAndDirectoriesSegmentResponse] = kwargs.pop("cls", None)
+
+ _request = build_list_files_and_directories_segment_request(
+ url=self._config.url,
+ prefix=prefix,
+ sharesnapshot=sharesnapshot,
+ marker=marker,
+ maxresults=maxresults,
+ timeout=timeout,
+ include=include,
+ include_extended_info=include_extended_info,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ deserialized = self._deserialize("ListFilesAndDirectoriesSegmentResponse", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
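+    def _example_list_all_entries(self, prefix: Optional[str] = None) -> None:
+        # Illustrative pagination sketch, not part of the generated client: it
+        # follows the continuation marker until the listing is exhausted. It
+        # assumes the ListFilesAndDirectoriesSegmentResponse model exposes a
+        # ``next_marker`` attribute, as the other storage listing models do.
+        marker: Optional[str] = None
+        while True:
+            segment = self.list_files_and_directories_segment(
+                prefix=prefix, marker=marker, maxresults=1000
+            )
+            # ... consume the entries in ``segment`` here ...
+            marker = segment.next_marker
+            if not marker:
+                break
+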
+ @distributed_trace
+ def list_handles(
+ self,
+ marker: Optional[str] = None,
+ maxresults: Optional[int] = None,
+ timeout: Optional[int] = None,
+ sharesnapshot: Optional[str] = None,
+ recursive: Optional[bool] = None,
+ **kwargs: Any
+ ) -> _models.ListHandlesResponse:
+ # pylint: disable=line-too-long
+        """Lists handles for the directory.
+
+ :param marker: A string value that identifies the portion of the list to be returned with the
+ next list operation. The operation returns a marker value within the response body if the list
+ returned was not complete. The marker value may then be used in a subsequent call to request
+ the next set of list items. The marker value is opaque to the client. Default value is None.
+ :type marker: str
+ :param maxresults: Specifies the maximum number of entries to return. If the request does not
+ specify maxresults, or specifies a value greater than 5,000, the server will return up to 5,000
+ items. Default value is None.
+ :type maxresults: int
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+        :param recursive: Specifies whether the operation should apply to the directory specified in
+         the URI, its files, its subdirectories, and their files. Default value is None.
+ :type recursive: bool
+ :return: ListHandlesResponse or the result of cls(response)
+ :rtype: ~azure.storage.fileshare.models.ListHandlesResponse
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["listhandles"] = kwargs.pop("comp", _params.pop("comp", "listhandles"))
+ cls: ClsType[_models.ListHandlesResponse] = kwargs.pop("cls", None)
+
+ _request = build_list_handles_request(
+ url=self._config.url,
+ marker=marker,
+ maxresults=maxresults,
+ timeout=timeout,
+ sharesnapshot=sharesnapshot,
+ recursive=recursive,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ deserialized = self._deserialize("ListHandlesResponse", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
+ @distributed_trace
+ def force_close_handles( # pylint: disable=inconsistent-return-statements
+ self,
+ handle_id: str,
+ timeout: Optional[int] = None,
+ marker: Optional[str] = None,
+ sharesnapshot: Optional[str] = None,
+ recursive: Optional[bool] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+        """Closes all handles open for the given directory.
+
+ :param handle_id: Specifies handle ID opened on the file or directory to be closed. Asterisk
+ (‘*’) is a wildcard that specifies all handles. Required.
+ :type handle_id: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param marker: A string value that identifies the portion of the list to be returned with the
+ next list operation. The operation returns a marker value within the response body if the list
+ returned was not complete. The marker value may then be used in a subsequent call to request
+ the next set of list items. The marker value is opaque to the client. Default value is None.
+ :type marker: str
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+        :param recursive: Specifies whether the operation should apply to the directory specified in
+         the URI, its files, its subdirectories, and their files. Default value is None.
+ :type recursive: bool
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["forceclosehandles"] = kwargs.pop("comp", _params.pop("comp", "forceclosehandles"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_force_close_handles_request(
+ url=self._config.url,
+ handle_id=handle_id,
+ timeout=timeout,
+ marker=marker,
+ sharesnapshot=sharesnapshot,
+ recursive=recursive,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-marker"] = self._deserialize("str", response.headers.get("x-ms-marker"))
+ response_headers["x-ms-number-of-handles-closed"] = self._deserialize(
+ "int", response.headers.get("x-ms-number-of-handles-closed")
+ )
+ response_headers["x-ms-number-of-handles-failed"] = self._deserialize(
+ "int", response.headers.get("x-ms-number-of-handles-failed")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
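+    def _example_force_close_all_handles(self) -> None:
+        # Illustrative usage sketch, not part of the generated client: closes
+        # every open handle on the directory tree by passing the "*" wildcard
+        # and following the continuation marker returned in the x-ms-marker
+        # response header, which is surfaced through the ``cls`` callback.
+        marker: Optional[str] = None
+        while True:
+            headers = self.force_close_handles(
+                handle_id="*",
+                marker=marker,
+                recursive=True,
+                cls=lambda pipeline_response, deserialized, response_headers: response_headers,
+            )
+            marker = headers.get("x-ms-marker")
+            if not marker:
+                break
+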
+ @distributed_trace
+ def rename( # pylint: disable=inconsistent-return-statements
+ self,
+ rename_source: str,
+ timeout: Optional[int] = None,
+ replace_if_exists: Optional[bool] = None,
+ ignore_read_only: Optional[bool] = None,
+ file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ file_permission_key: Optional[str] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ source_lease_access_conditions: Optional[_models.SourceLeaseAccessConditions] = None,
+ destination_lease_access_conditions: Optional[_models.DestinationLeaseAccessConditions] = None,
+ copy_file_smb_info: Optional[_models.CopyFileSmbInfo] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Renames a directory.
+
+        :param rename_source: Specifies the URI-style path of the source file, up to 2 KB in length.
+         Required.
+ :type rename_source: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+        :param replace_if_exists: Optional. A boolean value that specifies whether this request will
+         overwrite the destination file if it already exists. If true, the rename will succeed and
+         will overwrite the destination file. If not provided, or if false and the destination file
+         does exist, the request will not overwrite the destination file. If provided and the
+         destination file doesn’t exist, the rename will succeed. Note: This value does not override
+         the x-ms-file-copy-ignore-read-only header value. Default value is None.
+ :type replace_if_exists: bool
+ :param ignore_read_only: Optional. A boolean value that specifies whether the ReadOnly
+ attribute on a preexisting destination file should be respected. If true, the rename will
+ succeed, otherwise, a previous file at the destination with the ReadOnly attribute set will
+ cause the rename to fail. Default value is None.
+ :type ignore_read_only: bool
+ :param file_permission: If specified the permission (security descriptor) shall be set for the
+ directory/file. This header can be used if Permission size is <= 8KB, else
+ x-ms-file-permission-key header shall be used. Default value: Inherit. If SDDL is specified as
+ input, it must have owner, group and dacl. Note: Only one of the x-ms-file-permission or
+ x-ms-file-permission-key should be specified. Default value is "inherit".
+ :type file_permission: str
+ :param file_permission_format: Optional. Available for version 2023-06-01 and later. Specifies
+ the format in which the permission is returned. Acceptable values are SDDL or binary. If
+ x-ms-file-permission-format is unspecified or explicitly set to SDDL, the permission is
+ returned in SDDL format. If x-ms-file-permission-format is explicitly set to binary, the
+ permission is returned as a base64 string representing the binary encoding of the permission.
+ Known values are: "Sddl" and "Binary". Default value is None.
+ :type file_permission_format: str or ~azure.storage.fileshare.models.FilePermissionFormat
+ :param file_permission_key: Key of the permission to be set for the directory/file. Note: Only
+ one of the x-ms-file-permission or x-ms-file-permission-key should be specified. Default value
+ is None.
+ :type file_permission_key: str
+ :param metadata: A name-value pair to associate with a file storage object. Default value is
+ None.
+ :type metadata: dict[str, str]
+ :param source_lease_access_conditions: Parameter group. Default value is None.
+ :type source_lease_access_conditions:
+ ~azure.storage.fileshare.models.SourceLeaseAccessConditions
+ :param destination_lease_access_conditions: Parameter group. Default value is None.
+ :type destination_lease_access_conditions:
+ ~azure.storage.fileshare.models.DestinationLeaseAccessConditions
+ :param copy_file_smb_info: Parameter group. Default value is None.
+ :type copy_file_smb_info: ~azure.storage.fileshare.models.CopyFileSmbInfo
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["directory"] = kwargs.pop("restype", _params.pop("restype", "directory"))
+ comp: Literal["rename"] = kwargs.pop("comp", _params.pop("comp", "rename"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _source_lease_id = None
+ _destination_lease_id = None
+ _file_attributes = None
+ _file_creation_time = None
+ _file_last_write_time = None
+ _file_change_time = None
+ if source_lease_access_conditions is not None:
+ _source_lease_id = source_lease_access_conditions.source_lease_id
+ if destination_lease_access_conditions is not None:
+ _destination_lease_id = destination_lease_access_conditions.destination_lease_id
+ if copy_file_smb_info is not None:
+ _file_attributes = copy_file_smb_info.file_attributes
+ _file_change_time = copy_file_smb_info.file_change_time
+ _file_creation_time = copy_file_smb_info.file_creation_time
+ _file_last_write_time = copy_file_smb_info.file_last_write_time
+
+ _request = build_rename_request(
+ url=self._config.url,
+ rename_source=rename_source,
+ timeout=timeout,
+ replace_if_exists=replace_if_exists,
+ ignore_read_only=ignore_read_only,
+ source_lease_id=_source_lease_id,
+ destination_lease_id=_destination_lease_id,
+ file_attributes=_file_attributes,
+ file_creation_time=_file_creation_time,
+ file_last_write_time=_file_last_write_time,
+ file_change_time=_file_change_time,
+ file_permission=file_permission,
+ file_permission_format=file_permission_format,
+ file_permission_key=file_permission_key,
+ metadata=metadata,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ allow_source_trailing_dot=self._config.allow_source_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-request-server-encrypted")
+ )
+ response_headers["x-ms-file-permission-key"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-permission-key")
+ )
+ response_headers["x-ms-file-attributes"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-attributes")
+ )
+ response_headers["x-ms-file-creation-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-creation-time")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-file-change-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-change-time")
+ )
+ response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id"))
+ response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
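+
+
+# Illustrative helper below: a minimal usage sketch for the rename operation,
+# not part of the generated module. It assumes ``directory_ops`` was obtained
+# as the ``directory`` attribute of an AzureFileStorage client; the source URL,
+# lease ID, and file attributes are placeholders, and the keyword arguments of
+# the parameter-group models mirror the attributes the rename method reads
+# from them.
+def _example_rename_directory(directory_ops: DirectoryOperations) -> None:
+    directory_ops.rename(
+        rename_source="https://myaccount.file.core.windows.net/myshare/olddir",
+        replace_if_exists=True,
+        destination_lease_access_conditions=_models.DestinationLeaseAccessConditions(
+            destination_lease_id="00000000-0000-0000-0000-000000000000"
+        ),
+        copy_file_smb_info=_models.CopyFileSmbInfo(file_attributes="Directory"),
+    )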
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/_file_operations.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/_file_operations.py
new file mode 100644
index 00000000..d67f90d9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/_file_operations.py
@@ -0,0 +1,3755 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import sys
+from typing import Any, Callable, Dict, IO, Iterator, Literal, Optional, TypeVar, Union
+
+from azure.core import PipelineClient
+from azure.core.exceptions import (
+ ClientAuthenticationError,
+ HttpResponseError,
+ ResourceExistsError,
+ ResourceNotFoundError,
+ ResourceNotModifiedError,
+ StreamClosedError,
+ StreamConsumedError,
+ map_error,
+)
+from azure.core.pipeline import PipelineResponse
+from azure.core.rest import HttpRequest, HttpResponse
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.utils import case_insensitive_dict
+
+from .. import models as _models
+from .._configuration import AzureFileStorageConfiguration
+from .._serialization import Deserializer, Serializer
+
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+
+
+def build_create_request(
+ url: str,
+ *,
+ file_content_length: int,
+ timeout: Optional[int] = None,
+ file_content_type: Optional[str] = None,
+ file_content_encoding: Optional[str] = None,
+ file_content_language: Optional[str] = None,
+ file_cache_control: Optional[str] = None,
+ file_content_md5: Optional[bytes] = None,
+ file_content_disposition: Optional[str] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ file_permission_key: Optional[str] = None,
+ file_attributes: str = "none",
+ file_creation_time: str = "now",
+ file_last_write_time: str = "now",
+ file_change_time: Optional[str] = None,
+ lease_id: Optional[str] = None,
+ owner: Optional[str] = None,
+ group: Optional[str] = None,
+ file_mode: Optional[str] = None,
+ nfs_file_type: Optional[Union[str, _models.NfsFileType]] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ file_type_constant: Literal["file"] = kwargs.pop("file_type_constant", _headers.pop("x-ms-type", "file"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ _headers["x-ms-content-length"] = _SERIALIZER.header("file_content_length", file_content_length, "int")
+ _headers["x-ms-type"] = _SERIALIZER.header("file_type_constant", file_type_constant, "str")
+ if file_content_type is not None:
+ _headers["x-ms-content-type"] = _SERIALIZER.header("file_content_type", file_content_type, "str")
+ if file_content_encoding is not None:
+ _headers["x-ms-content-encoding"] = _SERIALIZER.header("file_content_encoding", file_content_encoding, "str")
+ if file_content_language is not None:
+ _headers["x-ms-content-language"] = _SERIALIZER.header("file_content_language", file_content_language, "str")
+ if file_cache_control is not None:
+ _headers["x-ms-cache-control"] = _SERIALIZER.header("file_cache_control", file_cache_control, "str")
+ if file_content_md5 is not None:
+ _headers["x-ms-content-md5"] = _SERIALIZER.header("file_content_md5", file_content_md5, "bytearray")
+ if file_content_disposition is not None:
+ _headers["x-ms-content-disposition"] = _SERIALIZER.header(
+ "file_content_disposition", file_content_disposition, "str"
+ )
+ if metadata is not None:
+ _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}")
+ if file_permission is not None:
+ _headers["x-ms-file-permission"] = _SERIALIZER.header("file_permission", file_permission, "str")
+ if file_permission_format is not None:
+ _headers["x-ms-file-permission-format"] = _SERIALIZER.header(
+ "file_permission_format", file_permission_format, "str"
+ )
+ if file_permission_key is not None:
+ _headers["x-ms-file-permission-key"] = _SERIALIZER.header("file_permission_key", file_permission_key, "str")
+ if file_attributes is not None:
+ _headers["x-ms-file-attributes"] = _SERIALIZER.header("file_attributes", file_attributes, "str")
+ if file_creation_time is not None:
+ _headers["x-ms-file-creation-time"] = _SERIALIZER.header("file_creation_time", file_creation_time, "str")
+ if file_last_write_time is not None:
+ _headers["x-ms-file-last-write-time"] = _SERIALIZER.header("file_last_write_time", file_last_write_time, "str")
+ if file_change_time is not None:
+ _headers["x-ms-file-change-time"] = _SERIALIZER.header("file_change_time", file_change_time, "str")
+ if lease_id is not None:
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ if owner is not None:
+ _headers["x-ms-owner"] = _SERIALIZER.header("owner", owner, "str")
+ if group is not None:
+ _headers["x-ms-group"] = _SERIALIZER.header("group", group, "str")
+ if file_mode is not None:
+ _headers["x-ms-mode"] = _SERIALIZER.header("file_mode", file_mode, "str")
+ if nfs_file_type is not None:
+ _headers["x-ms-file-file-type"] = _SERIALIZER.header("nfs_file_type", nfs_file_type, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_download_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ range: Optional[str] = None,
+ range_get_content_md5: Optional[bool] = None,
+ structured_body_type: Optional[str] = None,
+ lease_id: Optional[str] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if range is not None:
+ _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str")
+ if range_get_content_md5 is not None:
+ _headers["x-ms-range-get-content-md5"] = _SERIALIZER.header(
+ "range_get_content_md5", range_get_content_md5, "bool"
+ )
+ if structured_body_type is not None:
+ _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str")
+ if lease_id is not None:
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
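+ # --- Editor's illustrative sketch (not produced by the code generator) -------
+ # A minimal example of calling build_download_request directly, assuming a
+ # placeholder file URL; real callers reach it through FileOperations below.
+ def _example_build_download_request() -> HttpRequest:
+     return build_download_request(
+         url="https://myaccount.file.core.windows.net/myshare/dir/file.txt",
+         range="bytes=0-1023",        # serialized into the x-ms-range header
+         range_get_content_md5=True,  # only honoured when a range is supplied
+     )
+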
+
+def build_get_properties_request(
+ url: str,
+ *,
+ sharesnapshot: Optional[str] = None,
+ timeout: Optional[int] = None,
+ lease_id: Optional[str] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ if sharesnapshot is not None:
+ _params["sharesnapshot"] = _SERIALIZER.query("sharesnapshot", sharesnapshot, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if lease_id is not None:
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="HEAD", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_delete_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ lease_id: Optional[str] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if lease_id is not None:
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_set_http_headers_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ file_content_length: Optional[int] = None,
+ file_content_type: Optional[str] = None,
+ file_content_encoding: Optional[str] = None,
+ file_content_language: Optional[str] = None,
+ file_cache_control: Optional[str] = None,
+ file_content_md5: Optional[bytes] = None,
+ file_content_disposition: Optional[str] = None,
+ file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ file_permission_key: Optional[str] = None,
+ file_attributes: str = "none",
+ file_creation_time: str = "now",
+ file_last_write_time: str = "now",
+ file_change_time: Optional[str] = None,
+ lease_id: Optional[str] = None,
+ owner: Optional[str] = None,
+ group: Optional[str] = None,
+ file_mode: Optional[str] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if file_content_length is not None:
+ _headers["x-ms-content-length"] = _SERIALIZER.header("file_content_length", file_content_length, "int")
+ if file_content_type is not None:
+ _headers["x-ms-content-type"] = _SERIALIZER.header("file_content_type", file_content_type, "str")
+ if file_content_encoding is not None:
+ _headers["x-ms-content-encoding"] = _SERIALIZER.header("file_content_encoding", file_content_encoding, "str")
+ if file_content_language is not None:
+ _headers["x-ms-content-language"] = _SERIALIZER.header("file_content_language", file_content_language, "str")
+ if file_cache_control is not None:
+ _headers["x-ms-cache-control"] = _SERIALIZER.header("file_cache_control", file_cache_control, "str")
+ if file_content_md5 is not None:
+ _headers["x-ms-content-md5"] = _SERIALIZER.header("file_content_md5", file_content_md5, "bytearray")
+ if file_content_disposition is not None:
+ _headers["x-ms-content-disposition"] = _SERIALIZER.header(
+ "file_content_disposition", file_content_disposition, "str"
+ )
+ if file_permission is not None:
+ _headers["x-ms-file-permission"] = _SERIALIZER.header("file_permission", file_permission, "str")
+ if file_permission_format is not None:
+ _headers["x-ms-file-permission-format"] = _SERIALIZER.header(
+ "file_permission_format", file_permission_format, "str"
+ )
+ if file_permission_key is not None:
+ _headers["x-ms-file-permission-key"] = _SERIALIZER.header("file_permission_key", file_permission_key, "str")
+ if file_attributes is not None:
+ _headers["x-ms-file-attributes"] = _SERIALIZER.header("file_attributes", file_attributes, "str")
+ if file_creation_time is not None:
+ _headers["x-ms-file-creation-time"] = _SERIALIZER.header("file_creation_time", file_creation_time, "str")
+ if file_last_write_time is not None:
+ _headers["x-ms-file-last-write-time"] = _SERIALIZER.header("file_last_write_time", file_last_write_time, "str")
+ if file_change_time is not None:
+ _headers["x-ms-file-change-time"] = _SERIALIZER.header("file_change_time", file_change_time, "str")
+ if lease_id is not None:
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ if owner is not None:
+ _headers["x-ms-owner"] = _SERIALIZER.header("owner", owner, "str")
+ if group is not None:
+ _headers["x-ms-group"] = _SERIALIZER.header("group", group, "str")
+ if file_mode is not None:
+ _headers["x-ms-mode"] = _SERIALIZER.header("file_mode", file_mode, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_set_metadata_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ lease_id: Optional[str] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ if metadata is not None:
+ _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if lease_id is not None:
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_acquire_lease_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ duration: Optional[int] = None,
+ proposed_lease_id: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str")
+ if duration is not None:
+ _headers["x-ms-lease-duration"] = _SERIALIZER.header("duration", duration, "int")
+ if proposed_lease_id is not None:
+ _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if request_id_parameter is not None:
+ _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str")
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_release_lease_request(
+ url: str,
+ *,
+ lease_id: str,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str")
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if request_id_parameter is not None:
+ _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str")
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_change_lease_request(
+ url: str,
+ *,
+ lease_id: str,
+ timeout: Optional[int] = None,
+ proposed_lease_id: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str")
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ if proposed_lease_id is not None:
+ _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if request_id_parameter is not None:
+ _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str")
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_break_lease_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ lease_id: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str")
+ if lease_id is not None:
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if request_id_parameter is not None:
+ _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str")
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
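+ # --- Editor's illustrative sketch (not produced by the code generator) -------
+ # The four lease builders above share the same comp=lease PUT and differ only
+ # in the x-ms-lease-action header. A hypothetical lifecycle, with placeholder
+ # URL and lease ids, could chain them like this:
+ def _example_lease_lifecycle() -> None:
+     file_url = "https://myaccount.file.core.windows.net/myshare/file.txt"
+     acquire = build_acquire_lease_request(
+         file_url, duration=-1, proposed_lease_id="<proposed-guid>"  # -1 requests an infinite lease
+     )
+     change = build_change_lease_request(file_url, lease_id="<current-guid>", proposed_lease_id="<new-guid>")
+     release = build_release_lease_request(file_url, lease_id="<new-guid>")
+     broken = build_break_lease_request(file_url)  # break does not require a lease id
+     for request in (acquire, change, release, broken):
+         assert request.method == "PUT" and request.headers["x-ms-lease-action"]
+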
+
+def build_upload_range_request(
+ url: str,
+ *,
+ range: str,
+ content_length: int,
+ timeout: Optional[int] = None,
+ file_range_write: Union[str, _models.FileRangeWriteType] = "update",
+ content_md5: Optional[bytes] = None,
+ lease_id: Optional[str] = None,
+ file_last_written_mode: Optional[Union[str, _models.FileLastWrittenMode]] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
+ content: Optional[IO[bytes]] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["range"] = kwargs.pop("comp", _params.pop("comp", "range"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str")
+ _headers["x-ms-write"] = _SERIALIZER.header("file_range_write", file_range_write, "str")
+ _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int")
+ if content_md5 is not None:
+ _headers["Content-MD5"] = _SERIALIZER.header("content_md5", content_md5, "bytearray")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if lease_id is not None:
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ if file_last_written_mode is not None:
+ _headers["x-ms-file-last-write-time"] = _SERIALIZER.header(
+ "file_last_written_mode", file_last_written_mode, "str"
+ )
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ if structured_body_type is not None:
+ _headers["x-ms-structured-body"] = _SERIALIZER.header("structured_body_type", structured_body_type, "str")
+ if structured_content_length is not None:
+ _headers["x-ms-structured-content-length"] = _SERIALIZER.header(
+ "structured_content_length", structured_content_length, "int"
+ )
+ if content_type is not None:
+ _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs)
+
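+ # --- Editor's illustrative sketch (not produced by the code generator) -------
+ # build_upload_range_request pairs the byte payload with an explicit
+ # Content-Length and an x-ms-range that must cover exactly that many bytes.
+ # A hypothetical 512-byte write at offset 0 against a placeholder URL:
+ def _example_upload_range_request() -> HttpRequest:
+     payload = b"\x00" * 512
+     return build_upload_range_request(
+         url="https://myaccount.file.core.windows.net/myshare/file.txt",
+         range="bytes=0-511",          # inclusive range spanning 512 bytes
+         content_length=len(payload),  # must match the width of the range
+         content=payload,              # request body forwarded to HttpRequest
+         file_range_write="update",    # "clear" would zero the range instead
+     )
+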
+
+def build_upload_range_from_url_request(
+ url: str,
+ *,
+ range: str,
+ copy_source: str,
+ content_length: int,
+ timeout: Optional[int] = None,
+ source_range: Optional[str] = None,
+ source_content_crc64: Optional[bytes] = None,
+ source_if_match_crc64: Optional[bytes] = None,
+ source_if_none_match_crc64: Optional[bytes] = None,
+ lease_id: Optional[str] = None,
+ copy_source_authorization: Optional[str] = None,
+ file_last_written_mode: Optional[Union[str, _models.FileLastWrittenMode]] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ allow_source_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["range"] = kwargs.pop("comp", _params.pop("comp", "range"))
+ file_range_write_from_url: Literal["update"] = kwargs.pop(
+ "file_range_write_from_url", _headers.pop("x-ms-write", "update")
+ )
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str")
+ _headers["x-ms-copy-source"] = _SERIALIZER.header("copy_source", copy_source, "str")
+ if source_range is not None:
+ _headers["x-ms-source-range"] = _SERIALIZER.header("source_range", source_range, "str")
+ _headers["x-ms-write"] = _SERIALIZER.header("file_range_write_from_url", file_range_write_from_url, "str")
+ _headers["Content-Length"] = _SERIALIZER.header("content_length", content_length, "int")
+ if source_content_crc64 is not None:
+ _headers["x-ms-source-content-crc64"] = _SERIALIZER.header(
+ "source_content_crc64", source_content_crc64, "bytearray"
+ )
+ if source_if_match_crc64 is not None:
+ _headers["x-ms-source-if-match-crc64"] = _SERIALIZER.header(
+ "source_if_match_crc64", source_if_match_crc64, "bytearray"
+ )
+ if source_if_none_match_crc64 is not None:
+ _headers["x-ms-source-if-none-match-crc64"] = _SERIALIZER.header(
+ "source_if_none_match_crc64", source_if_none_match_crc64, "bytearray"
+ )
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if lease_id is not None:
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ if copy_source_authorization is not None:
+ _headers["x-ms-copy-source-authorization"] = _SERIALIZER.header(
+ "copy_source_authorization", copy_source_authorization, "str"
+ )
+ if file_last_written_mode is not None:
+ _headers["x-ms-file-last-write-time"] = _SERIALIZER.header(
+ "file_last_written_mode", file_last_written_mode, "str"
+ )
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ if allow_source_trailing_dot is not None:
+ _headers["x-ms-source-allow-trailing-dot"] = _SERIALIZER.header(
+ "allow_source_trailing_dot", allow_source_trailing_dot, "bool"
+ )
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_get_range_list_request(
+ url: str,
+ *,
+ sharesnapshot: Optional[str] = None,
+ prevsharesnapshot: Optional[str] = None,
+ timeout: Optional[int] = None,
+ range: Optional[str] = None,
+ lease_id: Optional[str] = None,
+ support_rename: Optional[bool] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["rangelist"] = kwargs.pop("comp", _params.pop("comp", "rangelist"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if sharesnapshot is not None:
+ _params["sharesnapshot"] = _SERIALIZER.query("sharesnapshot", sharesnapshot, "str")
+ if prevsharesnapshot is not None:
+ _params["prevsharesnapshot"] = _SERIALIZER.query("prevsharesnapshot", prevsharesnapshot, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if range is not None:
+ _headers["x-ms-range"] = _SERIALIZER.header("range", range, "str")
+ if lease_id is not None:
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ if support_rename is not None:
+ _headers["x-ms-file-support-rename"] = _SERIALIZER.header("support_rename", support_rename, "bool")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
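+ # --- Editor's illustrative sketch (not produced by the code generator) -------
+ # Supplying prevsharesnapshot together with sharesnapshot turns the range
+ # listing into a diff between two snapshots. Placeholder snapshot timestamps:
+ def _example_range_diff_request() -> HttpRequest:
+     return build_get_range_list_request(
+         url="https://myaccount.file.core.windows.net/myshare/file.txt",
+         sharesnapshot="2025-01-02T00:00:00.0000000Z",
+         prevsharesnapshot="2025-01-01T00:00:00.0000000Z",
+         range="bytes=0-1048575",  # optional: restrict the listing to this window
+     )
+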
+
+def build_start_copy_request(
+ url: str,
+ *,
+ copy_source: str,
+ timeout: Optional[int] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ file_permission_key: Optional[str] = None,
+ file_permission_copy_mode: Optional[Union[str, _models.PermissionCopyModeType]] = None,
+ ignore_read_only: Optional[bool] = None,
+ file_attributes: Optional[str] = None,
+ file_creation_time: Optional[str] = None,
+ file_last_write_time: Optional[str] = None,
+ file_change_time: Optional[str] = None,
+ set_archive_attribute: Optional[bool] = None,
+ lease_id: Optional[str] = None,
+ owner: Optional[str] = None,
+ group: Optional[str] = None,
+ file_mode: Optional[str] = None,
+ file_mode_copy_mode: Optional[Union[str, _models.ModeCopyMode]] = None,
+ file_owner_copy_mode: Optional[Union[str, _models.OwnerCopyMode]] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ allow_source_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if metadata is not None:
+ _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}")
+ _headers["x-ms-copy-source"] = _SERIALIZER.header("copy_source", copy_source, "str")
+ if file_permission is not None:
+ _headers["x-ms-file-permission"] = _SERIALIZER.header("file_permission", file_permission, "str")
+ if file_permission_format is not None:
+ _headers["x-ms-file-permission-format"] = _SERIALIZER.header(
+ "file_permission_format", file_permission_format, "str"
+ )
+ if file_permission_key is not None:
+ _headers["x-ms-file-permission-key"] = _SERIALIZER.header("file_permission_key", file_permission_key, "str")
+ if file_permission_copy_mode is not None:
+ _headers["x-ms-file-permission-copy-mode"] = _SERIALIZER.header(
+ "file_permission_copy_mode", file_permission_copy_mode, "str"
+ )
+ if ignore_read_only is not None:
+ _headers["x-ms-file-copy-ignore-readonly"] = _SERIALIZER.header("ignore_read_only", ignore_read_only, "bool")
+ if file_attributes is not None:
+ _headers["x-ms-file-attributes"] = _SERIALIZER.header("file_attributes", file_attributes, "str")
+ if file_creation_time is not None:
+ _headers["x-ms-file-creation-time"] = _SERIALIZER.header("file_creation_time", file_creation_time, "str")
+ if file_last_write_time is not None:
+ _headers["x-ms-file-last-write-time"] = _SERIALIZER.header("file_last_write_time", file_last_write_time, "str")
+ if file_change_time is not None:
+ _headers["x-ms-file-change-time"] = _SERIALIZER.header("file_change_time", file_change_time, "str")
+ if set_archive_attribute is not None:
+ _headers["x-ms-file-copy-set-archive"] = _SERIALIZER.header(
+ "set_archive_attribute", set_archive_attribute, "bool"
+ )
+ if lease_id is not None:
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ if allow_source_trailing_dot is not None:
+ _headers["x-ms-source-allow-trailing-dot"] = _SERIALIZER.header(
+ "allow_source_trailing_dot", allow_source_trailing_dot, "bool"
+ )
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ if owner is not None:
+ _headers["x-ms-owner"] = _SERIALIZER.header("owner", owner, "str")
+ if group is not None:
+ _headers["x-ms-group"] = _SERIALIZER.header("group", group, "str")
+ if file_mode is not None:
+ _headers["x-ms-mode"] = _SERIALIZER.header("file_mode", file_mode, "str")
+ if file_mode_copy_mode is not None:
+ _headers["x-ms-file-mode-copy-mode"] = _SERIALIZER.header("file_mode_copy_mode", file_mode_copy_mode, "str")
+ if file_owner_copy_mode is not None:
+ _headers["x-ms-file-owner-copy-mode"] = _SERIALIZER.header("file_owner_copy_mode", file_owner_copy_mode, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_abort_copy_request(
+ url: str,
+ *,
+ copy_id: str,
+ timeout: Optional[int] = None,
+ lease_id: Optional[str] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["copy"] = kwargs.pop("comp", _params.pop("comp", "copy"))
+ copy_action_abort_constant: Literal["abort"] = kwargs.pop(
+ "copy_action_abort_constant", _headers.pop("x-ms-copy-action", "abort")
+ )
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ _params["copyid"] = _SERIALIZER.query("copy_id", copy_id, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-copy-action"] = _SERIALIZER.header("copy_action_abort_constant", copy_action_abort_constant, "str")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if lease_id is not None:
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_list_handles_request(
+ url: str,
+ *,
+ marker: Optional[str] = None,
+ maxresults: Optional[int] = None,
+ timeout: Optional[int] = None,
+ sharesnapshot: Optional[str] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["listhandles"] = kwargs.pop("comp", _params.pop("comp", "listhandles"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if marker is not None:
+ _params["marker"] = _SERIALIZER.query("marker", marker, "str")
+ if maxresults is not None:
+ _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1)
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+ if sharesnapshot is not None:
+ _params["sharesnapshot"] = _SERIALIZER.query("sharesnapshot", sharesnapshot, "str")
+
+ # Construct headers
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_force_close_handles_request(
+ url: str,
+ *,
+ handle_id: str,
+ timeout: Optional[int] = None,
+ marker: Optional[str] = None,
+ sharesnapshot: Optional[str] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["forceclosehandles"] = kwargs.pop("comp", _params.pop("comp", "forceclosehandles"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+ if marker is not None:
+ _params["marker"] = _SERIALIZER.query("marker", marker, "str")
+ if sharesnapshot is not None:
+ _params["sharesnapshot"] = _SERIALIZER.query("sharesnapshot", sharesnapshot, "str")
+
+ # Construct headers
+ _headers["x-ms-handle-id"] = _SERIALIZER.header("handle_id", handle_id, "str")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
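+ # --- Editor's illustrative sketch (not produced by the code generator) -------
+ # Handle enumeration and forced close are continuation-token driven: each
+ # response may return a marker that is passed back on the next call. Closing
+ # every handle ("*") on a placeholder file in a single request:
+ def _example_force_close_all_handles() -> HttpRequest:
+     return build_force_close_handles_request(
+         url="https://myaccount.file.core.windows.net/myshare/file.txt",
+         handle_id="*",  # "*" targets all handles open on the resource
+     )
+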
+
+def build_rename_request(
+ url: str,
+ *,
+ rename_source: str,
+ timeout: Optional[int] = None,
+ replace_if_exists: Optional[bool] = None,
+ ignore_read_only: Optional[bool] = None,
+ source_lease_id: Optional[str] = None,
+ destination_lease_id: Optional[str] = None,
+ file_attributes: Optional[str] = None,
+ file_creation_time: Optional[str] = None,
+ file_last_write_time: Optional[str] = None,
+ file_change_time: Optional[str] = None,
+ file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ file_permission_key: Optional[str] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ file_content_type: Optional[str] = None,
+ allow_trailing_dot: Optional[bool] = None,
+ allow_source_trailing_dot: Optional[bool] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["rename"] = kwargs.pop("comp", _params.pop("comp", "rename"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ _headers["x-ms-file-rename-source"] = _SERIALIZER.header("rename_source", rename_source, "str")
+ if replace_if_exists is not None:
+ _headers["x-ms-file-rename-replace-if-exists"] = _SERIALIZER.header(
+ "replace_if_exists", replace_if_exists, "bool"
+ )
+ if ignore_read_only is not None:
+ _headers["x-ms-file-rename-ignore-readonly"] = _SERIALIZER.header("ignore_read_only", ignore_read_only, "bool")
+ if source_lease_id is not None:
+ _headers["x-ms-source-lease-id"] = _SERIALIZER.header("source_lease_id", source_lease_id, "str")
+ if destination_lease_id is not None:
+ _headers["x-ms-destination-lease-id"] = _SERIALIZER.header("destination_lease_id", destination_lease_id, "str")
+ if file_attributes is not None:
+ _headers["x-ms-file-attributes"] = _SERIALIZER.header("file_attributes", file_attributes, "str")
+ if file_creation_time is not None:
+ _headers["x-ms-file-creation-time"] = _SERIALIZER.header("file_creation_time", file_creation_time, "str")
+ if file_last_write_time is not None:
+ _headers["x-ms-file-last-write-time"] = _SERIALIZER.header("file_last_write_time", file_last_write_time, "str")
+ if file_change_time is not None:
+ _headers["x-ms-file-change-time"] = _SERIALIZER.header("file_change_time", file_change_time, "str")
+ if file_permission is not None:
+ _headers["x-ms-file-permission"] = _SERIALIZER.header("file_permission", file_permission, "str")
+ if file_permission_format is not None:
+ _headers["x-ms-file-permission-format"] = _SERIALIZER.header(
+ "file_permission_format", file_permission_format, "str"
+ )
+ if file_permission_key is not None:
+ _headers["x-ms-file-permission-key"] = _SERIALIZER.header("file_permission_key", file_permission_key, "str")
+ if metadata is not None:
+ _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}")
+ if file_content_type is not None:
+ _headers["x-ms-content-type"] = _SERIALIZER.header("file_content_type", file_content_type, "str")
+ if allow_trailing_dot is not None:
+ _headers["x-ms-allow-trailing-dot"] = _SERIALIZER.header("allow_trailing_dot", allow_trailing_dot, "bool")
+ if allow_source_trailing_dot is not None:
+ _headers["x-ms-source-allow-trailing-dot"] = _SERIALIZER.header(
+ "allow_source_trailing_dot", allow_source_trailing_dot, "bool"
+ )
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_create_symbolic_link_request(
+ url: str,
+ *,
+ link_text: str,
+ timeout: Optional[int] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ file_creation_time: str = "now",
+ file_last_write_time: str = "now",
+ request_id_parameter: Optional[str] = None,
+ lease_id: Optional[str] = None,
+ owner: Optional[str] = None,
+ group: Optional[str] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["symboliclink"] = kwargs.pop("restype", _params.pop("restype", "symboliclink"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if metadata is not None:
+ _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}")
+ if file_creation_time is not None:
+ _headers["x-ms-file-creation-time"] = _SERIALIZER.header("file_creation_time", file_creation_time, "str")
+ if file_last_write_time is not None:
+ _headers["x-ms-file-last-write-time"] = _SERIALIZER.header("file_last_write_time", file_last_write_time, "str")
+ if request_id_parameter is not None:
+ _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str")
+ if lease_id is not None:
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ if owner is not None:
+ _headers["x-ms-owner"] = _SERIALIZER.header("owner", owner, "str")
+ if group is not None:
+ _headers["x-ms-group"] = _SERIALIZER.header("group", group, "str")
+ _headers["x-ms-link-text"] = _SERIALIZER.header("link_text", link_text, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_get_symbolic_link_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ sharesnapshot: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["symboliclink"] = kwargs.pop("restype", _params.pop("restype", "symboliclink"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+ if sharesnapshot is not None:
+ _params["sharesnapshot"] = _SERIALIZER.query("sharesnapshot", sharesnapshot, "str")
+
+ # Construct headers
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if request_id_parameter is not None:
+ _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_create_hard_link_request(
+ url: str,
+ *,
+ target_file: str,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ lease_id: Optional[str] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["hardlink"] = kwargs.pop("restype", _params.pop("restype", "hardlink"))
+ file_type_constant: Literal["file"] = kwargs.pop("file_type_constant", _headers.pop("x-ms-type", "file"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ _headers["x-ms-type"] = _SERIALIZER.header("file_type_constant", file_type_constant, "str")
+ if request_id_parameter is not None:
+ _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str")
+ if lease_id is not None:
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ _headers["x-ms-file-target-file"] = _SERIALIZER.header("target_file", target_file, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
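+ # --- Editor's note (not produced by the code generator) ----------------------
+ # Every method on FileOperations below follows the same pattern as create():
+ # flatten any parameter groups, call the matching module-level build_*_request
+ # helper, format the URL against the client, run the request through the shared
+ # pipeline, then map error status codes and deserialize the response headers.
+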
+
+class FileOperations: # pylint: disable=too-many-public-methods
+ """
+ .. warning::
+ **DO NOT** instantiate this class directly.
+
+ Instead, you should access the following operations through
+ :class:`~azure.storage.fileshare.AzureFileStorage`'s
+ :attr:`file` attribute.
+ """
+
+ models = _models
+
+ def __init__(self, *args, **kwargs):
+ input_args = list(args)
+ self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
+ self._config: AzureFileStorageConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
+ self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
+ self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+ @distributed_trace
+ def create( # pylint: disable=inconsistent-return-statements
+ self,
+ file_content_length: int,
+ timeout: Optional[int] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ file_permission_key: Optional[str] = None,
+ file_attributes: str = "none",
+ file_creation_time: str = "now",
+ file_last_write_time: str = "now",
+ file_change_time: Optional[str] = None,
+ owner: Optional[str] = None,
+ group: Optional[str] = None,
+ file_mode: Optional[str] = None,
+ nfs_file_type: Optional[Union[str, _models.NfsFileType]] = None,
+ file_http_headers: Optional[_models.FileHTTPHeaders] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Creates a new file or replaces a file. Note it only initializes the file with no content.
+
+ :param file_content_length: Specifies the maximum size for the file, up to 4 TB. Required.
+ :type file_content_length: int
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param metadata: A name-value pair to associate with a file storage object. Default value is
+ None.
+ :type metadata: dict[str, str]
+ :param file_permission: If specified, the permission (security descriptor) shall be set for the
+ directory/file. This header can be used if the permission size is <= 8KB; otherwise the
+ x-ms-file-permission-key header shall be used. Default value: Inherit. If SDDL is specified as
+ input, it must have owner, group and dacl. Note: Only one of x-ms-file-permission or
+ x-ms-file-permission-key should be specified. Default value is "inherit".
+ :type file_permission: str
+ :param file_permission_format: Optional. Available for version 2023-06-01 and later. Specifies
+ the format in which the permission is returned. Acceptable values are SDDL or binary. If
+ x-ms-file-permission-format is unspecified or explicitly set to SDDL, the permission is
+ returned in SDDL format. If x-ms-file-permission-format is explicitly set to binary, the
+ permission is returned as a base64 string representing the binary encoding of the permission.
+ Known values are: "Sddl" and "Binary". Default value is None.
+ :type file_permission_format: str or ~azure.storage.fileshare.models.FilePermissionFormat
+ :param file_permission_key: Key of the permission to be set for the directory/file. Note: Only
+ one of the x-ms-file-permission or x-ms-file-permission-key should be specified. Default value
+ is None.
+ :type file_permission_key: str
+ :param file_attributes: If specified, the provided file attributes shall be set. Default value:
+ ‘Archive’ for file and ‘Directory’ for directory. ‘None’ can also be specified as default.
+ Default value is "none".
+ :type file_attributes: str
+ :param file_creation_time: Creation time for the file/directory. Default value: Now. Default
+ value is "now".
+ :type file_creation_time: str
+ :param file_last_write_time: Last write time for the file/directory. Default value: Now.
+ Default value is "now".
+ :type file_last_write_time: str
+ :param file_change_time: Change time for the file/directory. Default value: Now. Default value
+ is None.
+ :type file_change_time: str
+ :param owner: Optional, NFS only. The owner of the file or directory. Default value is None.
+ :type owner: str
+ :param group: Optional, NFS only. The owning group of the file or directory. Default value is
+ None.
+ :type group: str
+ :param file_mode: Optional, NFS only. The file mode of the file or directory. Default value is
+ None.
+ :type file_mode: str
+ :param nfs_file_type: Optional, NFS only. Type of the file or directory. Known values are:
+ "Regular", "Directory", and "SymLink". Default value is None.
+ :type nfs_file_type: str or ~azure.storage.fileshare.models.NfsFileType
+ :param file_http_headers: Parameter group. Default value is None.
+ :type file_http_headers: ~azure.storage.fileshare.models.FileHTTPHeaders
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = kwargs.pop("params", {}) or {}
+
+ file_type_constant: Literal["file"] = kwargs.pop("file_type_constant", _headers.pop("x-ms-type", "file"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _file_content_type = None
+ _file_content_encoding = None
+ _file_content_language = None
+ _file_cache_control = None
+ _file_content_md5 = None
+ _file_content_disposition = None
+ _lease_id = None
+ if file_http_headers is not None:
+ _file_cache_control = file_http_headers.file_cache_control
+ _file_content_disposition = file_http_headers.file_content_disposition
+ _file_content_encoding = file_http_headers.file_content_encoding
+ _file_content_language = file_http_headers.file_content_language
+ _file_content_md5 = file_http_headers.file_content_md5
+ _file_content_type = file_http_headers.file_content_type
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_create_request(
+ url=self._config.url,
+ file_content_length=file_content_length,
+ timeout=timeout,
+ file_content_type=_file_content_type,
+ file_content_encoding=_file_content_encoding,
+ file_content_language=_file_content_language,
+ file_cache_control=_file_cache_control,
+ file_content_md5=_file_content_md5,
+ file_content_disposition=_file_content_disposition,
+ metadata=metadata,
+ file_permission=file_permission,
+ file_permission_format=file_permission_format,
+ file_permission_key=file_permission_key,
+ file_attributes=file_attributes,
+ file_creation_time=file_creation_time,
+ file_last_write_time=file_last_write_time,
+ file_change_time=file_change_time,
+ lease_id=_lease_id,
+ owner=owner,
+ group=group,
+ file_mode=file_mode,
+ nfs_file_type=nfs_file_type,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ file_type_constant=file_type_constant,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-request-server-encrypted")
+ )
+ response_headers["x-ms-file-permission-key"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-permission-key")
+ )
+ response_headers["x-ms-file-attributes"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-attributes")
+ )
+ response_headers["x-ms-file-creation-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-creation-time")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-file-change-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-change-time")
+ )
+ response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id"))
+ response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id"))
+ response_headers["x-ms-mode"] = self._deserialize("str", response.headers.get("x-ms-mode"))
+ response_headers["x-ms-owner"] = self._deserialize("str", response.headers.get("x-ms-owner"))
+ response_headers["x-ms-group"] = self._deserialize("str", response.headers.get("x-ms-group"))
+ response_headers["x-ms-file-file-type"] = self._deserialize("str", response.headers.get("x-ms-file-file-type"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
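+    # Usage sketch (hedged): application code normally reaches this Create File call
+    # through the public ShareFileClient wrapper rather than this generated operation
+    # group; the connection string, share name and path below are placeholders.
+    #
+    #     from azure.storage.fileshare import ShareFileClient
+    #
+    #     file_client = ShareFileClient.from_connection_string(
+    #         conn_str="<connection-string>", share_name="myshare", file_path="dir/hello.txt"
+    #     )
+    #     file_client.create_file(size=1024)  # allocate a 1 KiB file; content is uploaded separately
+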
+ @distributed_trace
+ def download(
+ self,
+ timeout: Optional[int] = None,
+ range: Optional[str] = None,
+ range_get_content_md5: Optional[bool] = None,
+ structured_body_type: Optional[str] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> Iterator[bytes]:
+ # pylint: disable=line-too-long
+ """Reads or downloads a file from the system, including its metadata and properties.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param range: Return file data only from the specified byte range. Default value is None.
+ :type range: str
+ :param range_get_content_md5: When this header is set to true and specified together with the
+ Range header, the service returns the MD5 hash for the range, as long as the range is less than
+ or equal to 4 MB in size. Default value is None.
+ :type range_get_content_md5: bool
+        :param structured_body_type: Specifies that the response content should be returned as a
+         structured message and specifies the message schema version and properties. Default value is None.
+ :type structured_body_type: str
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: Iterator[bytes] or the result of cls(response)
+ :rtype: Iterator[bytes]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_download_request(
+ url=self._config.url,
+ timeout=timeout,
+ range=range,
+ range_get_content_md5=range_get_content_md5,
+ structured_body_type=structured_body_type,
+ lease_id=_lease_id,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _decompress = kwargs.pop("decompress", True)
+ _stream = True
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 206]:
+ try:
+ response.read() # Load the body in memory and close the socket
+ except (StreamConsumedError, StreamClosedError):
+ pass
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta"))
+ response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length"))
+ response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type"))
+ response_headers["Content-Range"] = self._deserialize("str", response.headers.get("Content-Range"))
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
+ response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding"))
+ response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control"))
+ response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition"))
+ response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Accept-Ranges"] = self._deserialize("str", response.headers.get("Accept-Ranges"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-copy-completion-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-copy-completion-time")
+ )
+ response_headers["x-ms-copy-status-description"] = self._deserialize(
+ "str", response.headers.get("x-ms-copy-status-description")
+ )
+ response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id"))
+ response_headers["x-ms-copy-progress"] = self._deserialize("str", response.headers.get("x-ms-copy-progress"))
+ response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source"))
+ response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status"))
+ response_headers["x-ms-content-md5"] = self._deserialize("bytearray", response.headers.get("x-ms-content-md5"))
+ response_headers["x-ms-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-server-encrypted")
+ )
+ response_headers["x-ms-file-attributes"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-attributes")
+ )
+ response_headers["x-ms-file-creation-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-creation-time")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-file-change-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-change-time")
+ )
+ response_headers["x-ms-file-permission-key"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-permission-key")
+ )
+ response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id"))
+ response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id"))
+ response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration"))
+ response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state"))
+ response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status"))
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
+ response_headers["x-ms-structured-content-length"] = self._deserialize(
+ "int", response.headers.get("x-ms-structured-content-length")
+ )
+ response_headers["x-ms-mode"] = self._deserialize("str", response.headers.get("x-ms-mode"))
+ response_headers["x-ms-owner"] = self._deserialize("str", response.headers.get("x-ms-owner"))
+ response_headers["x-ms-group"] = self._deserialize("str", response.headers.get("x-ms-group"))
+ response_headers["x-ms-link-count"] = self._deserialize("int", response.headers.get("x-ms-link-count"))
+
+ deserialized = response.stream_download(self._client._pipeline, decompress=_decompress)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
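+    # Usage sketch (hedged): ``download`` yields the body as an iterator of byte chunks;
+    # the public wrapper surfaces the same stream through a downloader object. Offsets
+    # and lengths below are illustrative.
+    #
+    #     downloader = file_client.download_file(offset=0, length=1024)  # first 1 KiB
+    #     data = downloader.readall()
+    #     # or stream chunk by chunk:
+    #     # for chunk in file_client.download_file().chunks():
+    #     #     handle(chunk)
+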
+ @distributed_trace
+ def get_properties( # pylint: disable=inconsistent-return-statements
+ self,
+ sharesnapshot: Optional[str] = None,
+ timeout: Optional[int] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Returns all user-defined metadata, standard HTTP properties, and system properties for the
+ file. It does not return the content of the file.
+
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_get_properties_request(
+ url=self._config.url,
+ sharesnapshot=sharesnapshot,
+ timeout=timeout,
+ lease_id=_lease_id,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta"))
+ response_headers["x-ms-type"] = self._deserialize("str", response.headers.get("x-ms-type"))
+ response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length"))
+ response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type"))
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
+ response_headers["Content-Encoding"] = self._deserialize("str", response.headers.get("Content-Encoding"))
+ response_headers["Cache-Control"] = self._deserialize("str", response.headers.get("Cache-Control"))
+ response_headers["Content-Disposition"] = self._deserialize("str", response.headers.get("Content-Disposition"))
+ response_headers["Content-Language"] = self._deserialize("str", response.headers.get("Content-Language"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-copy-completion-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-copy-completion-time")
+ )
+ response_headers["x-ms-copy-status-description"] = self._deserialize(
+ "str", response.headers.get("x-ms-copy-status-description")
+ )
+ response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id"))
+ response_headers["x-ms-copy-progress"] = self._deserialize("str", response.headers.get("x-ms-copy-progress"))
+ response_headers["x-ms-copy-source"] = self._deserialize("str", response.headers.get("x-ms-copy-source"))
+ response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status"))
+ response_headers["x-ms-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-server-encrypted")
+ )
+ response_headers["x-ms-file-attributes"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-attributes")
+ )
+ response_headers["x-ms-file-creation-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-creation-time")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-file-change-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-change-time")
+ )
+ response_headers["x-ms-file-permission-key"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-permission-key")
+ )
+ response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id"))
+ response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id"))
+ response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration"))
+ response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state"))
+ response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status"))
+ response_headers["x-ms-mode"] = self._deserialize("str", response.headers.get("x-ms-mode"))
+ response_headers["x-ms-owner"] = self._deserialize("str", response.headers.get("x-ms-owner"))
+ response_headers["x-ms-group"] = self._deserialize("str", response.headers.get("x-ms-group"))
+ response_headers["x-ms-link-count"] = self._deserialize("int", response.headers.get("x-ms-link-count"))
+ response_headers["x-ms-file-file-type"] = self._deserialize("str", response.headers.get("x-ms-file-file-type"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
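+    # Usage sketch (hedged): ``get_properties`` returns its results purely as response
+    # headers, which the generated code hands to the optional azure-core ``cls`` callback;
+    # ``client`` is assumed to be an already authenticated AzureFileStorage instance.
+    #
+    #     raw_headers = client.file.get_properties(
+    #         cls=lambda pipeline_response, deserialized, headers: headers
+    #     )
+    #     etag = raw_headers.get("ETag")
+    #     size = raw_headers.get("Content-Length")
+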
+ @distributed_trace
+ def delete( # pylint: disable=inconsistent-return-statements
+ self,
+ timeout: Optional[int] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+        """Removes the file from the storage account.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_delete_request(
+ url=self._config.url,
+ timeout=timeout,
+ lease_id=_lease_id,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-link-count"] = self._deserialize("int", response.headers.get("x-ms-link-count"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace
+ def set_http_headers( # pylint: disable=inconsistent-return-statements
+ self,
+ timeout: Optional[int] = None,
+ file_content_length: Optional[int] = None,
+ file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ file_permission_key: Optional[str] = None,
+ file_attributes: str = "none",
+ file_creation_time: str = "now",
+ file_last_write_time: str = "now",
+ file_change_time: Optional[str] = None,
+ owner: Optional[str] = None,
+ group: Optional[str] = None,
+ file_mode: Optional[str] = None,
+ file_http_headers: Optional[_models.FileHTTPHeaders] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Sets HTTP headers on the file.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param file_content_length: Resizes a file to the specified size. If the specified byte value
+ is less than the current size of the file, then all ranges above the specified byte value are
+ cleared. Default value is None.
+ :type file_content_length: int
+ :param file_permission: If specified the permission (security descriptor) shall be set for the
+ directory/file. This header can be used if Permission size is <= 8KB, else
+ x-ms-file-permission-key header shall be used. Default value: Inherit. If SDDL is specified as
+ input, it must have owner, group and dacl. Note: Only one of the x-ms-file-permission or
+ x-ms-file-permission-key should be specified. Default value is "inherit".
+ :type file_permission: str
+ :param file_permission_format: Optional. Available for version 2023-06-01 and later. Specifies
+ the format in which the permission is returned. Acceptable values are SDDL or binary. If
+ x-ms-file-permission-format is unspecified or explicitly set to SDDL, the permission is
+ returned in SDDL format. If x-ms-file-permission-format is explicitly set to binary, the
+ permission is returned as a base64 string representing the binary encoding of the permission.
+ Known values are: "Sddl" and "Binary". Default value is None.
+ :type file_permission_format: str or ~azure.storage.fileshare.models.FilePermissionFormat
+ :param file_permission_key: Key of the permission to be set for the directory/file. Note: Only
+ one of the x-ms-file-permission or x-ms-file-permission-key should be specified. Default value
+ is None.
+ :type file_permission_key: str
+ :param file_attributes: If specified, the provided file attributes shall be set. Default value:
+ ‘Archive’ for file and ‘Directory’ for directory. ‘None’ can also be specified as default.
+ Default value is "none".
+ :type file_attributes: str
+ :param file_creation_time: Creation time for the file/directory. Default value: Now. Default
+ value is "now".
+ :type file_creation_time: str
+ :param file_last_write_time: Last write time for the file/directory. Default value: Now.
+ Default value is "now".
+ :type file_last_write_time: str
+ :param file_change_time: Change time for the file/directory. Default value: Now. Default value
+ is None.
+ :type file_change_time: str
+ :param owner: Optional, NFS only. The owner of the file or directory. Default value is None.
+ :type owner: str
+ :param group: Optional, NFS only. The owning group of the file or directory. Default value is
+ None.
+ :type group: str
+ :param file_mode: Optional, NFS only. The file mode of the file or directory. Default value is
+ None.
+ :type file_mode: str
+ :param file_http_headers: Parameter group. Default value is None.
+ :type file_http_headers: ~azure.storage.fileshare.models.FileHTTPHeaders
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _file_content_type = None
+ _file_content_encoding = None
+ _file_content_language = None
+ _file_cache_control = None
+ _file_content_md5 = None
+ _file_content_disposition = None
+ _lease_id = None
+ if file_http_headers is not None:
+ _file_cache_control = file_http_headers.file_cache_control
+ _file_content_disposition = file_http_headers.file_content_disposition
+ _file_content_encoding = file_http_headers.file_content_encoding
+ _file_content_language = file_http_headers.file_content_language
+ _file_content_md5 = file_http_headers.file_content_md5
+ _file_content_type = file_http_headers.file_content_type
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_set_http_headers_request(
+ url=self._config.url,
+ timeout=timeout,
+ file_content_length=file_content_length,
+ file_content_type=_file_content_type,
+ file_content_encoding=_file_content_encoding,
+ file_content_language=_file_content_language,
+ file_cache_control=_file_cache_control,
+ file_content_md5=_file_content_md5,
+ file_content_disposition=_file_content_disposition,
+ file_permission=file_permission,
+ file_permission_format=file_permission_format,
+ file_permission_key=file_permission_key,
+ file_attributes=file_attributes,
+ file_creation_time=file_creation_time,
+ file_last_write_time=file_last_write_time,
+ file_change_time=file_change_time,
+ lease_id=_lease_id,
+ owner=owner,
+ group=group,
+ file_mode=file_mode,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-request-server-encrypted")
+ )
+ response_headers["x-ms-file-permission-key"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-permission-key")
+ )
+ response_headers["x-ms-file-attributes"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-attributes")
+ )
+ response_headers["x-ms-file-creation-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-creation-time")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-file-change-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-change-time")
+ )
+ response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id"))
+ response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id"))
+ response_headers["x-ms-mode"] = self._deserialize("str", response.headers.get("x-ms-mode"))
+ response_headers["x-ms-owner"] = self._deserialize("str", response.headers.get("x-ms-owner"))
+ response_headers["x-ms-group"] = self._deserialize("str", response.headers.get("x-ms-group"))
+ response_headers["x-ms-link-count"] = self._deserialize("int", response.headers.get("x-ms-link-count"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
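+    # Usage sketch (hedged): the public wrapper expresses the file_* header parameters
+    # above through a ContentSettings object; values are illustrative only.
+    #
+    #     from azure.storage.fileshare import ContentSettings
+    #
+    #     file_client.set_http_headers(
+    #         content_settings=ContentSettings(content_type="text/plain", cache_control="no-cache")
+    #     )
+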
+ @distributed_trace
+ def set_metadata( # pylint: disable=inconsistent-return-statements
+ self,
+ timeout: Optional[int] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Updates user-defined metadata for the specified file.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param metadata: A name-value pair to associate with a file storage object. Default value is
+ None.
+ :type metadata: dict[str, str]
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_set_metadata_request(
+ url=self._config.url,
+ timeout=timeout,
+ metadata=metadata,
+ lease_id=_lease_id,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-request-server-encrypted")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
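+    # Usage sketch (hedged): metadata is a flat name/value mapping transmitted as
+    # ``x-ms-meta-*`` headers; keys and values below are placeholders.
+    #
+    #     file_client.set_file_metadata(metadata={"category": "reports", "owner": "team-a"})
+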
+ @distributed_trace
+ def acquire_lease( # pylint: disable=inconsistent-return-statements
+ self,
+ timeout: Optional[int] = None,
+ duration: Optional[int] = None,
+ proposed_lease_id: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """[Update] The Lease File operation establishes and manages a lock on a file for write and delete
+ operations.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a
+ lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease
+ duration cannot be changed using renew or change. Default value is None.
+ :type duration: int
+ :param proposed_lease_id: Proposed lease ID, in a GUID string format. The File service returns
+ 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid
+ Constructor (String) for a list of valid GUID string formats. Default value is None.
+ :type proposed_lease_id: str
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_acquire_lease_request(
+ url=self._config.url,
+ timeout=timeout,
+ duration=duration,
+ proposed_lease_id=proposed_lease_id,
+ request_id_parameter=request_id_parameter,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ action=action,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace
+ def release_lease( # pylint: disable=inconsistent-return-statements
+ self, lease_id: str, timeout: Optional[int] = None, request_id_parameter: Optional[str] = None, **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """[Update] The Lease File operation establishes and manages a lock on a file for write and delete
+ operations.
+
+ :param lease_id: Specifies the current lease ID on the resource. Required.
+ :type lease_id: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_release_lease_request(
+ url=self._config.url,
+ lease_id=lease_id,
+ timeout=timeout,
+ request_id_parameter=request_id_parameter,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ action=action,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace
+ def change_lease( # pylint: disable=inconsistent-return-statements
+ self,
+ lease_id: str,
+ timeout: Optional[int] = None,
+ proposed_lease_id: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """[Update] The Lease File operation establishes and manages a lock on a file for write and delete
+ operations.
+
+ :param lease_id: Specifies the current lease ID on the resource. Required.
+ :type lease_id: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param proposed_lease_id: Proposed lease ID, in a GUID string format. The File service returns
+ 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid
+ Constructor (String) for a list of valid GUID string formats. Default value is None.
+ :type proposed_lease_id: str
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_change_lease_request(
+ url=self._config.url,
+ lease_id=lease_id,
+ timeout=timeout,
+ proposed_lease_id=proposed_lease_id,
+ request_id_parameter=request_id_parameter,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ action=action,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace
+ def break_lease( # pylint: disable=inconsistent-return-statements
+ self,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """[Update] The Lease File operation establishes and manages a lock on a file for write and delete
+ operations.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_break_lease_request(
+ url=self._config.url,
+ timeout=timeout,
+ lease_id=_lease_id,
+ request_id_parameter=request_id_parameter,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ action=action,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
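+    # Usage sketch (hedged): a typical lease lifecycle against this operation group; the
+    # lease id is returned only in the ``x-ms-lease-id`` header, captured here via the
+    # ``cls`` callback. ``client`` is assumed to be an authenticated AzureFileStorage.
+    #
+    #     import uuid
+    #
+    #     headers = client.file.acquire_lease(
+    #         duration=-1,  # infinite lease
+    #         proposed_lease_id=str(uuid.uuid4()),
+    #         cls=lambda resp, deserialized, hdrs: hdrs,
+    #     )
+    #     lease_id = headers["x-ms-lease-id"]
+    #     try:
+    #         pass  # write/delete operations guarded by the lease
+    #     finally:
+    #         client.file.release_lease(lease_id=lease_id)
+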
+ @distributed_trace
+ def upload_range( # pylint: disable=inconsistent-return-statements
+ self,
+ range: str,
+ content_length: int,
+ timeout: Optional[int] = None,
+ file_range_write: Union[str, _models.FileRangeWriteType] = "update",
+ content_md5: Optional[bytes] = None,
+ file_last_written_mode: Optional[Union[str, _models.FileLastWrittenMode]] = None,
+ structured_body_type: Optional[str] = None,
+ structured_content_length: Optional[int] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ optionalbody: Optional[IO[bytes]] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Upload a range of bytes to a file.
+
+ :param range: Specifies the range of bytes to be written. Both the start and end of the range
+ must be specified. For an update operation, the range can be up to 4 MB in size. For a clear
+ operation, the range can be up to the value of the file's full size. The File service accepts
+ only a single byte range for the Range and 'x-ms-range' headers, and the byte range must be
+ specified in the following format: bytes=startByte-endByte. Required.
+ :type range: str
+ :param content_length: Specifies the number of bytes being transmitted in the request body.
+ When the x-ms-write header is set to clear, the value of this header must be set to zero.
+ Required.
+ :type content_length: int
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param file_range_write: Specify one of the following options: - Update: Writes the bytes
+ specified by the request body into the specified range. The Range and Content-Length headers
+ must match to perform the update. - Clear: Clears the specified range and releases the space
+ used in storage for that range. To clear a range, set the Content-Length header to zero, and
+ set the Range header to a value that indicates the range to clear, up to maximum file size.
+ Known values are: "update" and "clear". Default value is "update".
+ :type file_range_write: str or ~azure.storage.fileshare.models.FileRangeWriteType
+ :param content_md5: An MD5 hash of the content. This hash is used to verify the integrity of
+ the data during transport. When the Content-MD5 header is specified, the File service compares
+ the hash of the content that has arrived with the header value that was sent. If the two hashes
+ do not match, the operation will fail with error code 400 (Bad Request). Default value is None.
+ :type content_md5: bytes
+ :param file_last_written_mode: If the file last write time should be preserved or overwritten.
+ Known values are: "Now" and "Preserve". Default value is None.
+ :type file_last_written_mode: str or ~azure.storage.fileshare.models.FileLastWrittenMode
+ :param structured_body_type: Required if the request body is a structured message. Specifies
+ the message schema version and properties. Default value is None.
+ :type structured_body_type: str
+ :param structured_content_length: Required if the request body is a structured message.
+ Specifies the length of the blob/file content inside the message body. Will always be smaller
+ than Content-Length. Default value is None.
+ :type structured_content_length: int
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :param optionalbody: Initial data. Default value is None.
+ :type optionalbody: IO[bytes]
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["range"] = kwargs.pop("comp", _params.pop("comp", "range"))
+ content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/octet-stream"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+ _content = optionalbody
+
+ _request = build_upload_range_request(
+ url=self._config.url,
+ range=range,
+ content_length=content_length,
+ timeout=timeout,
+ file_range_write=file_range_write,
+ content_md5=content_md5,
+ lease_id=_lease_id,
+ file_last_written_mode=file_last_written_mode,
+ structured_body_type=structured_body_type,
+ structured_content_length=structured_content_length,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ content_type=content_type,
+ version=self._config.version,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["Content-MD5"] = self._deserialize("bytearray", response.headers.get("Content-MD5"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-request-server-encrypted")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-structured-body"] = self._deserialize(
+ "str", response.headers.get("x-ms-structured-body")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
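+    # Usage sketch (hedged): uploading a small range directly; the Range header and
+    # content_length must describe exactly the bytes supplied in ``optionalbody``.
+    #
+    #     import io
+    #
+    #     data = b"hello world"
+    #     client.file.upload_range(
+    #         range=f"bytes=0-{len(data) - 1}",
+    #         content_length=len(data),
+    #         optionalbody=io.BytesIO(data),
+    #     )
+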
+ @distributed_trace
+ def upload_range_from_url( # pylint: disable=inconsistent-return-statements
+ self,
+ range: str,
+ copy_source: str,
+ content_length: int,
+ timeout: Optional[int] = None,
+ source_range: Optional[str] = None,
+ source_content_crc64: Optional[bytes] = None,
+ copy_source_authorization: Optional[str] = None,
+ file_last_written_mode: Optional[Union[str, _models.FileLastWrittenMode]] = None,
+ source_modified_access_conditions: Optional[_models.SourceModifiedAccessConditions] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Upload a range of bytes to a file where the contents are read from a URL.
+
+ :param range: Writes data to the specified byte range in the file. Required.
+ :type range: str
+ :param copy_source: Specifies the URL of the source file or blob, up to 2 KB in length. To copy
+ a file to another file within the same storage account, you may use Shared Key to authenticate
+ the source file. If you are copying a file from another storage account, or if you are copying
+ a blob from the same storage account or another storage account, then you must authenticate the
+ source file or blob using a shared access signature. If the source is a public blob, no
+ authentication is required to perform the copy operation. A file in a share snapshot can also
+ be specified as a copy source. Required.
+ :type copy_source: str
+ :param content_length: Specifies the number of bytes being transmitted in the request body.
+ When the x-ms-write header is set to clear, the value of this header must be set to zero.
+ Required.
+ :type content_length: int
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param source_range: Bytes of source data in the specified range. Default value is None.
+ :type source_range: str
+ :param source_content_crc64: Specify the crc64 calculated for the range of bytes that must be
+ read from the copy source. Default value is None.
+ :type source_content_crc64: bytes
+ :param copy_source_authorization: Only Bearer type is supported. Credentials should be a valid
+ OAuth access token to copy source. Default value is None.
+ :type copy_source_authorization: str
+ :param file_last_written_mode: Specifies whether the file's last write time should be preserved
+ or overwritten. Known values are: "Now" and "Preserve". Default value is None.
+ :type file_last_written_mode: str or ~azure.storage.fileshare.models.FileLastWrittenMode
+ :param source_modified_access_conditions: Parameter group. Default value is None.
+ :type source_modified_access_conditions:
+ ~azure.storage.fileshare.models.SourceModifiedAccessConditions
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["range"] = kwargs.pop("comp", _params.pop("comp", "range"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _source_if_match_crc64 = None
+ _source_if_none_match_crc64 = None
+ _lease_id = None
+ if source_modified_access_conditions is not None:
+ _source_if_match_crc64 = source_modified_access_conditions.source_if_match_crc64
+ _source_if_none_match_crc64 = source_modified_access_conditions.source_if_none_match_crc64
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_upload_range_from_url_request(
+ url=self._config.url,
+ range=range,
+ copy_source=copy_source,
+ content_length=content_length,
+ timeout=timeout,
+ source_range=source_range,
+ source_content_crc64=source_content_crc64,
+ source_if_match_crc64=_source_if_match_crc64,
+ source_if_none_match_crc64=_source_if_none_match_crc64,
+ lease_id=_lease_id,
+ copy_source_authorization=copy_source_authorization,
+ file_last_written_mode=file_last_written_mode,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ allow_source_trailing_dot=self._config.allow_source_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ file_range_write_from_url=self._config.file_range_write_from_url,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-content-crc64"] = self._deserialize(
+ "bytearray", response.headers.get("x-ms-content-crc64")
+ )
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-request-server-encrypted")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
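+ # Illustrative usage sketch for upload_range_from_url above, assuming a FileOperations
+ # instance named ``file_op`` (e.g. obtained from the generated AzureFileStorage client)
+ # and a placeholder SAS-authorized source URL. No request body is transmitted for a
+ # URL-sourced write, so content_length is 0.
+ #
+ #     file_op.upload_range_from_url(
+ #         range="bytes=0-1048575",
+ #         copy_source="https://srcaccount.file.core.windows.net/share/src.bin?<sas>",
+ #         content_length=0,
+ #         source_range="bytes=0-1048575",
+ #     )
+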
+ @distributed_trace
+ def get_range_list(
+ self,
+ sharesnapshot: Optional[str] = None,
+ prevsharesnapshot: Optional[str] = None,
+ timeout: Optional[int] = None,
+ range: Optional[str] = None,
+ support_rename: Optional[bool] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> _models.ShareFileRangeList:
+ # pylint: disable=line-too-long
+ """Returns the list of valid ranges for a file.
+
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param prevsharesnapshot: The previous snapshot parameter is an opaque DateTime value that,
+ when present, specifies the previous snapshot. Default value is None.
+ :type prevsharesnapshot: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param range: Specifies the range of bytes over which to list ranges, inclusively. Default
+ value is None.
+ :type range: str
+ :param support_rename: This header is allowed only when PrevShareSnapshot query parameter is
+ set. Determines whether the changed ranges for a file that has been renamed or moved between
+ the target snapshot (or the live file) and the previous snapshot should be listed. If the value
+ is true, the valid changed ranges for the file will be returned. If the value is false, the
+ operation will result in a failure with 409 (Conflict) response. The default value is false.
+ Default value is None.
+ :type support_rename: bool
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: ShareFileRangeList or the result of cls(response)
+ :rtype: ~azure.storage.fileshare.models.ShareFileRangeList
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["rangelist"] = kwargs.pop("comp", _params.pop("comp", "rangelist"))
+ cls: ClsType[_models.ShareFileRangeList] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_get_range_list_request(
+ url=self._config.url,
+ sharesnapshot=sharesnapshot,
+ prevsharesnapshot=prevsharesnapshot,
+ timeout=timeout,
+ range=range,
+ lease_id=_lease_id,
+ support_rename=support_rename,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["x-ms-content-length"] = self._deserialize("int", response.headers.get("x-ms-content-length"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ deserialized = self._deserialize("ShareFileRangeList", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
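+ # Illustrative sketch of calling get_range_list above, assuming a FileOperations
+ # instance named ``file_op``; the byte range is a placeholder.
+ #
+ #     range_list = file_op.get_range_list(range="bytes=0-4194303")
+ #     for file_range in range_list.ranges:
+ #         print(file_range.start, file_range.end)
+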
+ @distributed_trace
+ def start_copy( # pylint: disable=inconsistent-return-statements
+ self,
+ copy_source: str,
+ timeout: Optional[int] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ file_permission_key: Optional[str] = None,
+ owner: Optional[str] = None,
+ group: Optional[str] = None,
+ file_mode: Optional[str] = None,
+ file_mode_copy_mode: Optional[Union[str, _models.ModeCopyMode]] = None,
+ file_owner_copy_mode: Optional[Union[str, _models.OwnerCopyMode]] = None,
+ copy_file_smb_info: Optional[_models.CopyFileSmbInfo] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Copies a blob or file to a destination file within the storage account.
+
+ :param copy_source: Specifies the URL of the source file or blob, up to 2 KB in length. To copy
+ a file to another file within the same storage account, you may use Shared Key to authenticate
+ the source file. If you are copying a file from another storage account, or if you are copying
+ a blob from the same storage account or another storage account, then you must authenticate the
+ source file or blob using a shared access signature. If the source is a public blob, no
+ authentication is required to perform the copy operation. A file in a share snapshot can also
+ be specified as a copy source. Required.
+ :type copy_source: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param metadata: A name-value pair to associate with a file storage object. Default value is
+ None.
+ :type metadata: dict[str, str]
+ :param file_permission: If specified, the permission (security descriptor) shall be set for the
+ directory/file. This header can be used if the permission size is <= 8 KB; otherwise the
+ x-ms-file-permission-key header shall be used. Default value: Inherit. If SDDL is specified as
+ input, it must have an owner, group and dacl. Note: Only one of x-ms-file-permission or
+ x-ms-file-permission-key should be specified. Default value is "inherit".
+ :type file_permission: str
+ :param file_permission_format: Optional. Available for version 2023-06-01 and later. Specifies
+ the format in which the permission is returned. Acceptable values are SDDL or binary. If
+ x-ms-file-permission-format is unspecified or explicitly set to SDDL, the permission is
+ returned in SDDL format. If x-ms-file-permission-format is explicitly set to binary, the
+ permission is returned as a base64 string representing the binary encoding of the permission.
+ Known values are: "Sddl" and "Binary". Default value is None.
+ :type file_permission_format: str or ~azure.storage.fileshare.models.FilePermissionFormat
+ :param file_permission_key: Key of the permission to be set for the directory/file. Note: Only
+ one of the x-ms-file-permission or x-ms-file-permission-key should be specified. Default value
+ is None.
+ :type file_permission_key: str
+ :param owner: Optional, NFS only. The owner of the file or directory. Default value is None.
+ :type owner: str
+ :param group: Optional, NFS only. The owning group of the file or directory. Default value is
+ None.
+ :type group: str
+ :param file_mode: Optional, NFS only. The file mode of the file or directory. Default value is
+ None.
+ :type file_mode: str
+ :param file_mode_copy_mode: NFS only. Applicable only when the copy source is a File.
+ Determines the copy behavior of the mode bits of the file. source: The mode on the destination
+ file is copied from the source file. override: The mode on the destination file is determined
+ via the x-ms-mode header. Known values are: "source" and "override". Default value is None.
+ :type file_mode_copy_mode: str or ~azure.storage.fileshare.models.ModeCopyMode
+ :param file_owner_copy_mode: NFS only. Determines the copy behavior of the owner user
+ identifier (UID) and group identifier (GID) of the file. source: The owner user identifier
+ (UID) and group identifier (GID) on the destination file is copied from the source file.
+ override: The owner user identifier (UID) and group identifier (GID) on the destination file is
+ determined via the x-ms-owner and x-ms-group headers. Known values are: "source" and
+ "override". Default value is None.
+ :type file_owner_copy_mode: str or ~azure.storage.fileshare.models.OwnerCopyMode
+ :param copy_file_smb_info: Parameter group. Default value is None.
+ :type copy_file_smb_info: ~azure.storage.fileshare.models.CopyFileSmbInfo
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = kwargs.pop("params", {}) or {}
+
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _file_permission_copy_mode = None
+ _ignore_read_only = None
+ _file_attributes = None
+ _file_creation_time = None
+ _file_last_write_time = None
+ _file_change_time = None
+ _set_archive_attribute = None
+ _lease_id = None
+ if copy_file_smb_info is not None:
+ _file_attributes = copy_file_smb_info.file_attributes
+ _file_change_time = copy_file_smb_info.file_change_time
+ _file_creation_time = copy_file_smb_info.file_creation_time
+ _file_last_write_time = copy_file_smb_info.file_last_write_time
+ _file_permission_copy_mode = copy_file_smb_info.file_permission_copy_mode
+ _ignore_read_only = copy_file_smb_info.ignore_read_only
+ _set_archive_attribute = copy_file_smb_info.set_archive_attribute
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_start_copy_request(
+ url=self._config.url,
+ copy_source=copy_source,
+ timeout=timeout,
+ metadata=metadata,
+ file_permission=file_permission,
+ file_permission_format=file_permission_format,
+ file_permission_key=file_permission_key,
+ file_permission_copy_mode=_file_permission_copy_mode,
+ ignore_read_only=_ignore_read_only,
+ file_attributes=_file_attributes,
+ file_creation_time=_file_creation_time,
+ file_last_write_time=_file_last_write_time,
+ file_change_time=_file_change_time,
+ set_archive_attribute=_set_archive_attribute,
+ lease_id=_lease_id,
+ owner=owner,
+ group=group,
+ file_mode=file_mode,
+ file_mode_copy_mode=file_mode_copy_mode,
+ file_owner_copy_mode=file_owner_copy_mode,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ allow_source_trailing_dot=self._config.allow_source_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-copy-id"] = self._deserialize("str", response.headers.get("x-ms-copy-id"))
+ response_headers["x-ms-copy-status"] = self._deserialize("str", response.headers.get("x-ms-copy-status"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
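+ # Illustrative sketch of calling start_copy above, assuming a FileOperations instance
+ # named ``file_op`` and a placeholder source URL. A ``cls`` callback is used to read
+ # the copy identifier from the response headers.
+ #
+ #     headers = file_op.start_copy(
+ #         copy_source="https://account.file.core.windows.net/share/source.txt?<sas>",
+ #         metadata={"origin": "copy-example"},
+ #         cls=lambda resp, deserialized, resp_headers: resp_headers,
+ #     )
+ #     copy_id = headers["x-ms-copy-id"]
+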
+ @distributed_trace
+ def abort_copy( # pylint: disable=inconsistent-return-statements
+ self,
+ copy_id: str,
+ timeout: Optional[int] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Aborts a pending Copy File operation, and leaves a destination file with zero length and full
+ metadata.
+
+ :param copy_id: The copy identifier provided in the x-ms-copy-id header of the original Copy
+ File operation. Required.
+ :type copy_id: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["copy"] = kwargs.pop("comp", _params.pop("comp", "copy"))
+ copy_action_abort_constant: Literal["abort"] = kwargs.pop(
+ "copy_action_abort_constant", _headers.pop("x-ms-copy-action", "abort")
+ )
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_abort_copy_request(
+ url=self._config.url,
+ copy_id=copy_id,
+ timeout=timeout,
+ lease_id=_lease_id,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ copy_action_abort_constant=copy_action_abort_constant,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
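+ # Illustrative sketch of calling abort_copy above: pass the identifier returned in the
+ # x-ms-copy-id header of the original start_copy call. Assumes ``file_op`` and
+ # ``copy_id`` from the previous sketch.
+ #
+ #     file_op.abort_copy(copy_id=copy_id)
+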
+ @distributed_trace
+ def list_handles(
+ self,
+ marker: Optional[str] = None,
+ maxresults: Optional[int] = None,
+ timeout: Optional[int] = None,
+ sharesnapshot: Optional[str] = None,
+ **kwargs: Any
+ ) -> _models.ListHandlesResponse:
+ # pylint: disable=line-too-long
+ """Lists handles for file.
+
+ :param marker: A string value that identifies the portion of the list to be returned with the
+ next list operation. The operation returns a marker value within the response body if the list
+ returned was not complete. The marker value may then be used in a subsequent call to request
+ the next set of list items. The marker value is opaque to the client. Default value is None.
+ :type marker: str
+ :param maxresults: Specifies the maximum number of entries to return. If the request does not
+ specify maxresults, or specifies a value greater than 5,000, the server will return up to 5,000
+ items. Default value is None.
+ :type maxresults: int
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :return: ListHandlesResponse or the result of cls(response)
+ :rtype: ~azure.storage.fileshare.models.ListHandlesResponse
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["listhandles"] = kwargs.pop("comp", _params.pop("comp", "listhandles"))
+ cls: ClsType[_models.ListHandlesResponse] = kwargs.pop("cls", None)
+
+ _request = build_list_handles_request(
+ url=self._config.url,
+ marker=marker,
+ maxresults=maxresults,
+ timeout=timeout,
+ sharesnapshot=sharesnapshot,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["Content-Type"] = self._deserialize("str", response.headers.get("Content-Type"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ deserialized = self._deserialize("ListHandlesResponse", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
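+ # Illustrative sketch of paging through list_handles above with the continuation
+ # marker, assuming a FileOperations instance named ``file_op``.
+ #
+ #     marker = None
+ #     while True:
+ #         page = file_op.list_handles(marker=marker, maxresults=100)
+ #         for handle in page.handle_list or []:
+ #             print(handle.handle_id)
+ #         marker = page.next_marker
+ #         if not marker:
+ #             break
+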
+ @distributed_trace
+ def force_close_handles( # pylint: disable=inconsistent-return-statements
+ self,
+ handle_id: str,
+ timeout: Optional[int] = None,
+ marker: Optional[str] = None,
+ sharesnapshot: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Closes all handles open for given file.
+
+ :param handle_id: Specifies handle ID opened on the file or directory to be closed. Asterisk
+ (‘*’) is a wildcard that specifies all handles. Required.
+ :type handle_id: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param marker: A string value that identifies the portion of the list to be returned with the
+ next list operation. The operation returns a marker value within the response body if the list
+ returned was not complete. The marker value may then be used in a subsequent call to request
+ the next set of list items. The marker value is opaque to the client. Default value is None.
+ :type marker: str
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["forceclosehandles"] = kwargs.pop("comp", _params.pop("comp", "forceclosehandles"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_force_close_handles_request(
+ url=self._config.url,
+ handle_id=handle_id,
+ timeout=timeout,
+ marker=marker,
+ sharesnapshot=sharesnapshot,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-marker"] = self._deserialize("str", response.headers.get("x-ms-marker"))
+ response_headers["x-ms-number-of-handles-closed"] = self._deserialize(
+ "int", response.headers.get("x-ms-number-of-handles-closed")
+ )
+ response_headers["x-ms-number-of-handles-failed"] = self._deserialize(
+ "int", response.headers.get("x-ms-number-of-handles-failed")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
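+ # Illustrative sketch of calling force_close_handles above with the '*' wildcard to
+ # close every handle on the file, reading the closed/failed counts from the response
+ # headers via a ``cls`` callback. Assumes a FileOperations instance named ``file_op``.
+ #
+ #     headers = file_op.force_close_handles(
+ #         handle_id="*",
+ #         cls=lambda resp, deserialized, resp_headers: resp_headers,
+ #     )
+ #     closed = headers["x-ms-number-of-handles-closed"]
+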
+ @distributed_trace
+ def rename( # pylint: disable=inconsistent-return-statements
+ self,
+ rename_source: str,
+ timeout: Optional[int] = None,
+ replace_if_exists: Optional[bool] = None,
+ ignore_read_only: Optional[bool] = None,
+ file_permission: str = "inherit",
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ file_permission_key: Optional[str] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ source_lease_access_conditions: Optional[_models.SourceLeaseAccessConditions] = None,
+ destination_lease_access_conditions: Optional[_models.DestinationLeaseAccessConditions] = None,
+ copy_file_smb_info: Optional[_models.CopyFileSmbInfo] = None,
+ file_http_headers: Optional[_models.FileHTTPHeaders] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Renames a file.
+
+ :param rename_source: Specifies the URI-style path of the source file, up to 2 KB in length.
+ Required.
+ :type rename_source: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param replace_if_exists: Optional. A boolean value specifying whether this request may
+ overwrite the destination file if it already exists. If true, the rename will succeed and will
+ overwrite the destination file. If not provided, or if false and the destination file does
+ exist, the request will not overwrite the destination file. If provided and the destination
+ file doesn’t exist, the rename will succeed. Note: This value does not override the
+ x-ms-file-copy-ignore-read-only header value. Default value is None.
+ :type replace_if_exists: bool
+ :param ignore_read_only: Optional. A boolean value that specifies whether the ReadOnly
+ attribute on a preexisting destination file should be respected. If true, the rename will
+ succeed, otherwise, a previous file at the destination with the ReadOnly attribute set will
+ cause the rename to fail. Default value is None.
+ :type ignore_read_only: bool
+ :param file_permission: If specified, the permission (security descriptor) shall be set for the
+ directory/file. This header can be used if the permission size is <= 8 KB; otherwise the
+ x-ms-file-permission-key header shall be used. Default value: Inherit. If SDDL is specified as
+ input, it must have an owner, group and dacl. Note: Only one of x-ms-file-permission or
+ x-ms-file-permission-key should be specified. Default value is "inherit".
+ :type file_permission: str
+ :param file_permission_format: Optional. Available for version 2023-06-01 and later. Specifies
+ the format in which the permission is returned. Acceptable values are SDDL or binary. If
+ x-ms-file-permission-format is unspecified or explicitly set to SDDL, the permission is
+ returned in SDDL format. If x-ms-file-permission-format is explicitly set to binary, the
+ permission is returned as a base64 string representing the binary encoding of the permission.
+ Known values are: "Sddl" and "Binary". Default value is None.
+ :type file_permission_format: str or ~azure.storage.fileshare.models.FilePermissionFormat
+ :param file_permission_key: Key of the permission to be set for the directory/file. Note: Only
+ one of the x-ms-file-permission or x-ms-file-permission-key should be specified. Default value
+ is None.
+ :type file_permission_key: str
+ :param metadata: A name-value pair to associate with a file storage object. Default value is
+ None.
+ :type metadata: dict[str, str]
+ :param source_lease_access_conditions: Parameter group. Default value is None.
+ :type source_lease_access_conditions:
+ ~azure.storage.fileshare.models.SourceLeaseAccessConditions
+ :param destination_lease_access_conditions: Parameter group. Default value is None.
+ :type destination_lease_access_conditions:
+ ~azure.storage.fileshare.models.DestinationLeaseAccessConditions
+ :param copy_file_smb_info: Parameter group. Default value is None.
+ :type copy_file_smb_info: ~azure.storage.fileshare.models.CopyFileSmbInfo
+ :param file_http_headers: Parameter group. Default value is None.
+ :type file_http_headers: ~azure.storage.fileshare.models.FileHTTPHeaders
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["rename"] = kwargs.pop("comp", _params.pop("comp", "rename"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _source_lease_id = None
+ _destination_lease_id = None
+ _file_attributes = None
+ _file_creation_time = None
+ _file_last_write_time = None
+ _file_change_time = None
+ _file_content_type = None
+ if source_lease_access_conditions is not None:
+ _source_lease_id = source_lease_access_conditions.source_lease_id
+ if destination_lease_access_conditions is not None:
+ _destination_lease_id = destination_lease_access_conditions.destination_lease_id
+ if copy_file_smb_info is not None:
+ _file_attributes = copy_file_smb_info.file_attributes
+ _file_change_time = copy_file_smb_info.file_change_time
+ _file_creation_time = copy_file_smb_info.file_creation_time
+ _file_last_write_time = copy_file_smb_info.file_last_write_time
+ if file_http_headers is not None:
+ _file_content_type = file_http_headers.file_content_type
+
+ _request = build_rename_request(
+ url=self._config.url,
+ rename_source=rename_source,
+ timeout=timeout,
+ replace_if_exists=replace_if_exists,
+ ignore_read_only=ignore_read_only,
+ source_lease_id=_source_lease_id,
+ destination_lease_id=_destination_lease_id,
+ file_attributes=_file_attributes,
+ file_creation_time=_file_creation_time,
+ file_last_write_time=_file_last_write_time,
+ file_change_time=_file_change_time,
+ file_permission=file_permission,
+ file_permission_format=file_permission_format,
+ file_permission_key=file_permission_key,
+ metadata=metadata,
+ file_content_type=_file_content_type,
+ allow_trailing_dot=self._config.allow_trailing_dot,
+ allow_source_trailing_dot=self._config.allow_source_trailing_dot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-request-server-encrypted"] = self._deserialize(
+ "bool", response.headers.get("x-ms-request-server-encrypted")
+ )
+ response_headers["x-ms-file-permission-key"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-permission-key")
+ )
+ response_headers["x-ms-file-attributes"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-attributes")
+ )
+ response_headers["x-ms-file-creation-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-creation-time")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-file-change-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-change-time")
+ )
+ response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id"))
+ response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
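+ # Illustrative sketch of calling rename above, assuming a FileOperations instance
+ # named ``file_op`` whose configured URL addresses the destination path; the source
+ # URL is a placeholder.
+ #
+ #     file_op.rename(
+ #         rename_source="https://account.file.core.windows.net/share/old-name.txt",
+ #         replace_if_exists=True,
+ #     )
+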
+ @distributed_trace
+ def create_symbolic_link( # pylint: disable=inconsistent-return-statements
+ self,
+ link_text: str,
+ timeout: Optional[int] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ file_creation_time: str = "now",
+ file_last_write_time: str = "now",
+ request_id_parameter: Optional[str] = None,
+ owner: Optional[str] = None,
+ group: Optional[str] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Creates a symbolic link.
+
+ :param link_text: NFS only. The path to the original file that the symbolic link points to.
+ The path is a string that is not resolved and is stored as is. The path can be an absolute or a
+ relative path, depending on the content stored in the symbolic link file. Required.
+ :type link_text: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param metadata: A name-value pair to associate with a file storage object. Default value is
+ None.
+ :type metadata: dict[str, str]
+ :param file_creation_time: Creation time for the file/directory. Default value: Now. Default
+ value is "now".
+ :type file_creation_time: str
+ :param file_last_write_time: Last write time for the file/directory. Default value: Now.
+ Default value is "now".
+ :type file_last_write_time: str
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :param owner: Optional, NFS only. The owner of the file or directory. Default value is None.
+ :type owner: str
+ :param group: Optional, NFS only. The owning group of the file or directory. Default value is
+ None.
+ :type group: str
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["symboliclink"] = kwargs.pop("restype", _params.pop("restype", "symboliclink"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_create_symbolic_link_request(
+ url=self._config.url,
+ link_text=link_text,
+ timeout=timeout,
+ metadata=metadata,
+ file_creation_time=file_creation_time,
+ file_last_write_time=file_last_write_time,
+ request_id_parameter=request_id_parameter,
+ lease_id=_lease_id,
+ owner=owner,
+ group=group,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-file-creation-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-creation-time")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-file-change-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-change-time")
+ )
+ response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id"))
+ response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-mode"] = self._deserialize("str", response.headers.get("x-ms-mode"))
+ response_headers["x-ms-owner"] = self._deserialize("str", response.headers.get("x-ms-owner"))
+ response_headers["x-ms-group"] = self._deserialize("str", response.headers.get("x-ms-group"))
+ response_headers["x-ms-file-file-type"] = self._deserialize("str", response.headers.get("x-ms-file-file-type"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
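+ # Illustrative sketch of calling create_symbolic_link above (NFS shares only),
+ # assuming a FileOperations instance named ``file_op`` addressing the path of the new
+ # link; the link text is a placeholder and is stored as-is.
+ #
+ #     file_op.create_symbolic_link(link_text="../data/current.log")
+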
+ @distributed_trace
+ def get_symbolic_link( # pylint: disable=inconsistent-return-statements
+ self,
+ timeout: Optional[int] = None,
+ sharesnapshot: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """get_symbolic_link.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["symboliclink"] = kwargs.pop("restype", _params.pop("restype", "symboliclink"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_get_symbolic_link_request(
+ url=self._config.url,
+ timeout=timeout,
+ sharesnapshot=sharesnapshot,
+ request_id_parameter=request_id_parameter,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-link-text"] = self._deserialize("str", response.headers.get("x-ms-link-text"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
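+ # Illustrative sketch of calling get_symbolic_link above (NFS shares only), using a
+ # ``cls`` callback to read the link text from the x-ms-link-text response header.
+ # Assumes a FileOperations instance named ``file_op``.
+ #
+ #     headers = file_op.get_symbolic_link(
+ #         cls=lambda resp, deserialized, resp_headers: resp_headers,
+ #     )
+ #     link_text = headers["x-ms-link-text"]
+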
+ @distributed_trace
+ def create_hard_link( # pylint: disable=inconsistent-return-statements
+ self,
+ target_file: str,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Creates a hard link.
+
+ :param target_file: NFS only. Specifies the path of the target file to which the link will be
+ created, up to 2 KiB in length. It should be the full path of the target from the root. The
+ target file must be in the same share and hence in the same storage account. Required.
+ :type target_file: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["hardlink"] = kwargs.pop("restype", _params.pop("restype", "hardlink"))
+ file_type_constant: Literal["file"] = kwargs.pop("file_type_constant", _headers.pop("x-ms-type", "file"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_create_hard_link_request(
+ url=self._config.url,
+ target_file=target_file,
+ timeout=timeout,
+ request_id_parameter=request_id_parameter,
+ lease_id=_lease_id,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ file_type_constant=file_type_constant,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-file-creation-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-creation-time")
+ )
+ response_headers["x-ms-file-last-write-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-last-write-time")
+ )
+ response_headers["x-ms-file-change-time"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-change-time")
+ )
+ response_headers["x-ms-file-id"] = self._deserialize("str", response.headers.get("x-ms-file-id"))
+ response_headers["x-ms-file-parent-id"] = self._deserialize("str", response.headers.get("x-ms-file-parent-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-link-count"] = self._deserialize("int", response.headers.get("x-ms-link-count"))
+ response_headers["x-ms-mode"] = self._deserialize("str", response.headers.get("x-ms-mode"))
+ response_headers["x-ms-owner"] = self._deserialize("str", response.headers.get("x-ms-owner"))
+ response_headers["x-ms-group"] = self._deserialize("str", response.headers.get("x-ms-group"))
+ response_headers["x-ms-file-file-type"] = self._deserialize("str", response.headers.get("x-ms-file-file-type"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
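+ # Illustrative sketch of calling create_hard_link above (NFS shares only), assuming a
+ # FileOperations instance named ``file_op`` addressing the path of the new link; the
+ # target path is a placeholder and should be the full path from the root of the share.
+ #
+ #     file_op.create_hard_link(target_file="/myshare/dir/original.bin")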
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/_patch.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/_patch.py
new file mode 100644
index 00000000..f7dd3251
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/_patch.py
@@ -0,0 +1,20 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
+from typing import List
+
+__all__: List[str] = [] # Add all objects you want publicly available to users at this package level
+
+
+def patch_sdk():
+ """Do not remove from this file.
+
+ `patch_sdk` is a last resort escape hatch that allows you to do customizations
+ you can't accomplish using the techniques described in
+ https://aka.ms/azsdk/python/dpcodegen/python/customize
+ """
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/_service_operations.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/_service_operations.py
new file mode 100644
index 00000000..57e5b246
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/_service_operations.py
@@ -0,0 +1,410 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import sys
+from typing import Any, Callable, Dict, List, Literal, Optional, TypeVar, Union
+
+from azure.core import PipelineClient
+from azure.core.exceptions import (
+ ClientAuthenticationError,
+ HttpResponseError,
+ ResourceExistsError,
+ ResourceNotFoundError,
+ ResourceNotModifiedError,
+ map_error,
+)
+from azure.core.pipeline import PipelineResponse
+from azure.core.rest import HttpRequest, HttpResponse
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.utils import case_insensitive_dict
+
+from .. import models as _models
+from .._configuration import AzureFileStorageConfiguration
+from .._serialization import Deserializer, Serializer
+
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+
+
+def build_set_properties_request(
+ url: str,
+ *,
+ content: Any,
+ timeout: Optional[int] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service"))
+ comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ if content_type is not None:
+ _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs)
+
+
+def build_get_properties_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service"))
+ comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_list_shares_segment_request(
+ url: str,
+ *,
+ prefix: Optional[str] = None,
+ marker: Optional[str] = None,
+ maxresults: Optional[int] = None,
+ include: Optional[List[Union[str, _models.ListSharesIncludeType]]] = None,
+ timeout: Optional[int] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if prefix is not None:
+ _params["prefix"] = _SERIALIZER.query("prefix", prefix, "str")
+ if marker is not None:
+ _params["marker"] = _SERIALIZER.query("marker", marker, "str")
+ if maxresults is not None:
+ _params["maxresults"] = _SERIALIZER.query("maxresults", maxresults, "int", minimum=1)
+ if include is not None:
+ _params["include"] = _SERIALIZER.query("include", include, "[str]", div=",")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+class ServiceOperations:
+ """
+ .. warning::
+ **DO NOT** instantiate this class directly.
+
+ Instead, you should access the following operations through
+ :class:`~azure.storage.fileshare.AzureFileStorage`'s
+ :attr:`service` attribute.
+ """
+
+ models = _models
+
+ def __init__(self, *args, **kwargs):
+ input_args = list(args)
+ self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
+ self._config: AzureFileStorageConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
+ self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
+ self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+ @distributed_trace
+ def set_properties( # pylint: disable=inconsistent-return-statements
+ self, storage_service_properties: _models.StorageServiceProperties, timeout: Optional[int] = None, **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Sets properties for a storage account's File service endpoint, including properties for Storage
+ Analytics metrics and CORS (Cross-Origin Resource Sharing) rules.
+
+ :param storage_service_properties: The StorageService properties. Required.
+ :type storage_service_properties: ~azure.storage.fileshare.models.StorageServiceProperties
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service"))
+ comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
+ content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _content = self._serialize.body(storage_service_properties, "StorageServiceProperties", is_xml=True)
+
+ _request = build_set_properties_request(
+ url=self._config.url,
+ timeout=timeout,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ content_type=content_type,
+ version=self._config.version,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
+ @distributed_trace
+ def get_properties(self, timeout: Optional[int] = None, **kwargs: Any) -> _models.StorageServiceProperties:
+ # pylint: disable=line-too-long
+ """Gets the properties of a storage account's File service, including properties for Storage
+ Analytics metrics and CORS (Cross-Origin Resource Sharing) rules.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :return: StorageServiceProperties or the result of cls(response)
+ :rtype: ~azure.storage.fileshare.models.StorageServiceProperties
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["service"] = kwargs.pop("restype", _params.pop("restype", "service"))
+ comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
+ cls: ClsType[_models.StorageServiceProperties] = kwargs.pop("cls", None)
+
+ _request = build_get_properties_request(
+ url=self._config.url,
+ timeout=timeout,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+
+ deserialized = self._deserialize("StorageServiceProperties", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
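+ # Example (illustrative sketch, not part of the generated surface): these
+ # operations are normally reached through the ``service`` attribute of the
+ # AzureFileStorage client, as noted in the class docstring. Client construction
+ # below is schematic only (auth and pipeline options omitted):
+ #
+ #     client = AzureFileStorage(url=account_url)
+ #     props = client.service.get_properties(timeout=30)
+ #     client.service.set_properties(props, timeout=30)
+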
+ @distributed_trace
+ def list_shares_segment(
+ self,
+ prefix: Optional[str] = None,
+ marker: Optional[str] = None,
+ maxresults: Optional[int] = None,
+ include: Optional[List[Union[str, _models.ListSharesIncludeType]]] = None,
+ timeout: Optional[int] = None,
+ **kwargs: Any
+ ) -> _models.ListSharesResponse:
+ # pylint: disable=line-too-long
+ """The List Shares Segment operation returns a list of the shares and share snapshots under the
+ specified account.
+
+ :param prefix: Filters the results to return only entries whose name begins with the specified
+ prefix. Default value is None.
+ :type prefix: str
+ :param marker: A string value that identifies the portion of the list to be returned with the
+ next list operation. The operation returns a marker value within the response body if the list
+ returned was not complete. The marker value may then be used in a subsequent call to request
+ the next set of list items. The marker value is opaque to the client. Default value is None.
+ :type marker: str
+ :param maxresults: Specifies the maximum number of entries to return. If the request does not
+ specify maxresults, or specifies a value greater than 5,000, the server will return up to 5,000
+ items. Default value is None.
+ :type maxresults: int
+ :param include: Include this parameter to specify one or more datasets to include in the
+ response. Default value is None.
+ :type include: list[str or ~azure.storage.fileshare.models.ListSharesIncludeType]
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :return: ListSharesResponse or the result of cls(response)
+ :rtype: ~azure.storage.fileshare.models.ListSharesResponse
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["list"] = kwargs.pop("comp", _params.pop("comp", "list"))
+ cls: ClsType[_models.ListSharesResponse] = kwargs.pop("cls", None)
+
+ _request = build_list_shares_segment_request(
+ url=self._config.url,
+ prefix=prefix,
+ marker=marker,
+ maxresults=maxresults,
+ include=include,
+ timeout=timeout,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+
+ deserialized = self._deserialize("ListSharesResponse", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
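+
+
+ # Example (illustrative sketch): ``list_shares_segment`` returns a single segment;
+ # callers page by feeding the returned ``next_marker`` back in as ``marker`` until
+ # it comes back empty. Attribute names below assume the generated
+ # ListSharesResponse model:
+ #
+ #     marker = None
+ #     while True:
+ #         segment = client.service.list_shares_segment(marker=marker, maxresults=100)
+ #         for share in segment.share_items or []:
+ #             print(share.name)
+ #         marker = segment.next_marker
+ #         if not marker:
+ #             break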
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/_share_operations.py b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/_share_operations.py
new file mode 100644
index 00000000..920a64e2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/operations/_share_operations.py
@@ -0,0 +1,2595 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from io import IOBase
+import sys
+from typing import Any, Callable, Dict, IO, List, Literal, Optional, TypeVar, Union, overload
+
+from azure.core import PipelineClient
+from azure.core.exceptions import (
+ ClientAuthenticationError,
+ HttpResponseError,
+ ResourceExistsError,
+ ResourceNotFoundError,
+ ResourceNotModifiedError,
+ map_error,
+)
+from azure.core.pipeline import PipelineResponse
+from azure.core.rest import HttpRequest, HttpResponse
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.utils import case_insensitive_dict
+
+from .. import models as _models
+from .._configuration import AzureFileStorageConfiguration
+from .._serialization import Deserializer, Serializer
+
+if sys.version_info >= (3, 9):
+ from collections.abc import MutableMapping
+else:
+ from typing import MutableMapping # type: ignore
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+
+
+def build_create_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ quota: Optional[int] = None,
+ access_tier: Optional[Union[str, _models.ShareAccessTier]] = None,
+ enabled_protocols: Optional[str] = None,
+ root_squash: Optional[Union[str, _models.ShareRootSquash]] = None,
+ enable_snapshot_virtual_directory_access: Optional[bool] = None,
+ paid_bursting_enabled: Optional[bool] = None,
+ paid_bursting_max_bandwidth_mibps: Optional[int] = None,
+ paid_bursting_max_iops: Optional[int] = None,
+ share_provisioned_iops: Optional[int] = None,
+ share_provisioned_bandwidth_mibps: Optional[int] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ if metadata is not None:
+ _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}")
+ if quota is not None:
+ _headers["x-ms-share-quota"] = _SERIALIZER.header("quota", quota, "int", minimum=1)
+ if access_tier is not None:
+ _headers["x-ms-access-tier"] = _SERIALIZER.header("access_tier", access_tier, "str")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if enabled_protocols is not None:
+ _headers["x-ms-enabled-protocols"] = _SERIALIZER.header("enabled_protocols", enabled_protocols, "str")
+ if root_squash is not None:
+ _headers["x-ms-root-squash"] = _SERIALIZER.header("root_squash", root_squash, "str")
+ if enable_snapshot_virtual_directory_access is not None:
+ _headers["x-ms-enable-snapshot-virtual-directory-access"] = _SERIALIZER.header(
+ "enable_snapshot_virtual_directory_access", enable_snapshot_virtual_directory_access, "bool"
+ )
+ if paid_bursting_enabled is not None:
+ _headers["x-ms-share-paid-bursting-enabled"] = _SERIALIZER.header(
+ "paid_bursting_enabled", paid_bursting_enabled, "bool"
+ )
+ if paid_bursting_max_bandwidth_mibps is not None:
+ _headers["x-ms-share-paid-bursting-max-bandwidth-mibps"] = _SERIALIZER.header(
+ "paid_bursting_max_bandwidth_mibps", paid_bursting_max_bandwidth_mibps, "int"
+ )
+ if paid_bursting_max_iops is not None:
+ _headers["x-ms-share-paid-bursting-max-iops"] = _SERIALIZER.header(
+ "paid_bursting_max_iops", paid_bursting_max_iops, "int"
+ )
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ if share_provisioned_iops is not None:
+ _headers["x-ms-share-provisioned-iops"] = _SERIALIZER.header(
+ "share_provisioned_iops", share_provisioned_iops, "int"
+ )
+ if share_provisioned_bandwidth_mibps is not None:
+ _headers["x-ms-share-provisioned-bandwidth-mibps"] = _SERIALIZER.header(
+ "share_provisioned_bandwidth_mibps", share_provisioned_bandwidth_mibps, "int"
+ )
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
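+ # Example (illustrative sketch): the builder above only assembles an HttpRequest;
+ # nothing is sent on the wire. The endpoint URL is a placeholder, and header
+ # values are the string forms produced by the serializer:
+ #
+ #     request = build_create_request(
+ #         "https://myaccount.file.core.windows.net/myshare",
+ #         quota=100,
+ #     )
+ #     assert request.method == "PUT"
+ #     assert request.headers["x-ms-share-quota"] == "100"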
+
+def build_get_properties_request(
+ url: str,
+ *,
+ sharesnapshot: Optional[str] = None,
+ timeout: Optional[int] = None,
+ lease_id: Optional[str] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ if sharesnapshot is not None:
+ _params["sharesnapshot"] = _SERIALIZER.query("sharesnapshot", sharesnapshot, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if lease_id is not None:
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_delete_request(
+ url: str,
+ *,
+ sharesnapshot: Optional[str] = None,
+ timeout: Optional[int] = None,
+ delete_snapshots: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None,
+ lease_id: Optional[str] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ if sharesnapshot is not None:
+ _params["sharesnapshot"] = _SERIALIZER.query("sharesnapshot", sharesnapshot, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if delete_snapshots is not None:
+ _headers["x-ms-delete-snapshots"] = _SERIALIZER.header("delete_snapshots", delete_snapshots, "str")
+ if lease_id is not None:
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_acquire_lease_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ duration: Optional[int] = None,
+ proposed_lease_id: Optional[str] = None,
+ sharesnapshot: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire"))
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+ if sharesnapshot is not None:
+ _params["sharesnapshot"] = _SERIALIZER.query("sharesnapshot", sharesnapshot, "str")
+
+ # Construct headers
+ _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str")
+ if duration is not None:
+ _headers["x-ms-lease-duration"] = _SERIALIZER.header("duration", duration, "int")
+ if proposed_lease_id is not None:
+ _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if request_id_parameter is not None:
+ _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_release_lease_request(
+ url: str,
+ *,
+ lease_id: str,
+ timeout: Optional[int] = None,
+ sharesnapshot: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release"))
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+ if sharesnapshot is not None:
+ _params["sharesnapshot"] = _SERIALIZER.query("sharesnapshot", sharesnapshot, "str")
+
+ # Construct headers
+ _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str")
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if request_id_parameter is not None:
+ _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_change_lease_request(
+ url: str,
+ *,
+ lease_id: str,
+ timeout: Optional[int] = None,
+ proposed_lease_id: Optional[str] = None,
+ sharesnapshot: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change"))
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+ if sharesnapshot is not None:
+ _params["sharesnapshot"] = _SERIALIZER.query("sharesnapshot", sharesnapshot, "str")
+
+ # Construct headers
+ _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str")
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ if proposed_lease_id is not None:
+ _headers["x-ms-proposed-lease-id"] = _SERIALIZER.header("proposed_lease_id", proposed_lease_id, "str")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if request_id_parameter is not None:
+ _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_renew_lease_request(
+ url: str,
+ *,
+ lease_id: str,
+ timeout: Optional[int] = None,
+ sharesnapshot: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew"))
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+ if sharesnapshot is not None:
+ _params["sharesnapshot"] = _SERIALIZER.query("sharesnapshot", sharesnapshot, "str")
+
+ # Construct headers
+ _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str")
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if request_id_parameter is not None:
+ _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_break_lease_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ break_period: Optional[int] = None,
+ lease_id: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ sharesnapshot: Optional[str] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break"))
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+ if sharesnapshot is not None:
+ _params["sharesnapshot"] = _SERIALIZER.query("sharesnapshot", sharesnapshot, "str")
+
+ # Construct headers
+ _headers["x-ms-lease-action"] = _SERIALIZER.header("action", action, "str")
+ if break_period is not None:
+ _headers["x-ms-lease-break-period"] = _SERIALIZER.header("break_period", break_period, "int")
+ if lease_id is not None:
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if request_id_parameter is not None:
+ _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
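+ # Example (illustrative sketch): the lease builders above differ only in the
+ # x-ms-lease-action header they set. A typical share-lease lifecycle expressed
+ # with these builders (identifiers are placeholders; duration=-1 requests an
+ # infinite lease):
+ #
+ #     acquire = build_acquire_lease_request(share_url, duration=-1,
+ #                                           proposed_lease_id=my_lease_id)
+ #     renew = build_renew_lease_request(share_url, lease_id=my_lease_id)
+ #     release = build_release_lease_request(share_url, lease_id=my_lease_id)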
+
+def build_create_snapshot_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["snapshot"] = kwargs.pop("comp", _params.pop("comp", "snapshot"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ if metadata is not None:
+ _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_create_permission_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["filepermission"] = kwargs.pop("comp", _params.pop("comp", "filepermission"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ if content_type is not None:
+ _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_get_permission_request(
+ url: str,
+ *,
+ file_permission_key: str,
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ timeout: Optional[int] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["filepermission"] = kwargs.pop("comp", _params.pop("comp", "filepermission"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/json")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-file-permission-key"] = _SERIALIZER.header("file_permission_key", file_permission_key, "str")
+ if file_permission_format is not None:
+ _headers["x-ms-file-permission-format"] = _SERIALIZER.header(
+ "file_permission_format", file_permission_format, "str"
+ )
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
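+ # Note: unlike the other builders in this module, build_get_permission_request
+ # asks for a JSON response ("application/json" Accept header above); the share
+ # permission (security descriptor) is exchanged as JSON rather than XML.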
+
+def build_set_properties_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ quota: Optional[int] = None,
+ access_tier: Optional[Union[str, _models.ShareAccessTier]] = None,
+ lease_id: Optional[str] = None,
+ root_squash: Optional[Union[str, _models.ShareRootSquash]] = None,
+ enable_snapshot_virtual_directory_access: Optional[bool] = None,
+ paid_bursting_enabled: Optional[bool] = None,
+ paid_bursting_max_bandwidth_mibps: Optional[int] = None,
+ paid_bursting_max_iops: Optional[int] = None,
+ share_provisioned_iops: Optional[int] = None,
+ share_provisioned_bandwidth_mibps: Optional[int] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if quota is not None:
+ _headers["x-ms-share-quota"] = _SERIALIZER.header("quota", quota, "int", minimum=1)
+ if access_tier is not None:
+ _headers["x-ms-access-tier"] = _SERIALIZER.header("access_tier", access_tier, "str")
+ if lease_id is not None:
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ if root_squash is not None:
+ _headers["x-ms-root-squash"] = _SERIALIZER.header("root_squash", root_squash, "str")
+ if enable_snapshot_virtual_directory_access is not None:
+ _headers["x-ms-enable-snapshot-virtual-directory-access"] = _SERIALIZER.header(
+ "enable_snapshot_virtual_directory_access", enable_snapshot_virtual_directory_access, "bool"
+ )
+ if paid_bursting_enabled is not None:
+ _headers["x-ms-share-paid-bursting-enabled"] = _SERIALIZER.header(
+ "paid_bursting_enabled", paid_bursting_enabled, "bool"
+ )
+ if paid_bursting_max_bandwidth_mibps is not None:
+ _headers["x-ms-share-paid-bursting-max-bandwidth-mibps"] = _SERIALIZER.header(
+ "paid_bursting_max_bandwidth_mibps", paid_bursting_max_bandwidth_mibps, "int"
+ )
+ if paid_bursting_max_iops is not None:
+ _headers["x-ms-share-paid-bursting-max-iops"] = _SERIALIZER.header(
+ "paid_bursting_max_iops", paid_bursting_max_iops, "int"
+ )
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ if share_provisioned_iops is not None:
+ _headers["x-ms-share-provisioned-iops"] = _SERIALIZER.header(
+ "share_provisioned_iops", share_provisioned_iops, "int"
+ )
+ if share_provisioned_bandwidth_mibps is not None:
+ _headers["x-ms-share-provisioned-bandwidth-mibps"] = _SERIALIZER.header(
+ "share_provisioned_bandwidth_mibps", share_provisioned_bandwidth_mibps, "int"
+ )
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_set_metadata_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ lease_id: Optional[str] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ if metadata is not None:
+ _headers["x-ms-meta"] = _SERIALIZER.header("metadata", metadata, "{str}")
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if lease_id is not None:
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_get_access_policy_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ lease_id: Optional[str] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if lease_id is not None:
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_set_access_policy_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ lease_id: Optional[str] = None,
+ content: Any = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if lease_id is not None:
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ if content_type is not None:
+ _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, content=content, **kwargs)
+
+
+def build_get_statistics_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ lease_id: Optional[str] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["stats"] = kwargs.pop("comp", _params.pop("comp", "stats"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if lease_id is not None:
+ _headers["x-ms-lease-id"] = _SERIALIZER.header("lease_id", lease_id, "str")
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+def build_restore_request(
+ url: str,
+ *,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ deleted_share_name: Optional[str] = None,
+ deleted_share_version: Optional[str] = None,
+ file_request_intent: Optional[Union[str, _models.ShareTokenIntent]] = None,
+ **kwargs: Any
+) -> HttpRequest:
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete"))
+ version: Literal["2025-05-05"] = kwargs.pop("version", _headers.pop("x-ms-version", "2025-05-05"))
+ accept = _headers.pop("Accept", "application/xml")
+
+ # Construct URL
+ _url = kwargs.pop("template_url", "{url}")
+ path_format_arguments = {
+ "url": _SERIALIZER.url("url", url, "str", skip_quote=True),
+ }
+
+ _url: str = _url.format(**path_format_arguments) # type: ignore
+
+ # Construct parameters
+ _params["restype"] = _SERIALIZER.query("restype", restype, "str")
+ _params["comp"] = _SERIALIZER.query("comp", comp, "str")
+ if timeout is not None:
+ _params["timeout"] = _SERIALIZER.query("timeout", timeout, "int", minimum=0)
+
+ # Construct headers
+ _headers["x-ms-version"] = _SERIALIZER.header("version", version, "str")
+ if request_id_parameter is not None:
+ _headers["x-ms-client-request-id"] = _SERIALIZER.header("request_id_parameter", request_id_parameter, "str")
+ if deleted_share_name is not None:
+ _headers["x-ms-deleted-share-name"] = _SERIALIZER.header("deleted_share_name", deleted_share_name, "str")
+ if deleted_share_version is not None:
+ _headers["x-ms-deleted-share-version"] = _SERIALIZER.header(
+ "deleted_share_version", deleted_share_version, "str"
+ )
+ if file_request_intent is not None:
+ _headers["x-ms-file-request-intent"] = _SERIALIZER.header("file_request_intent", file_request_intent, "str")
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+class ShareOperations:
+ """
+ .. warning::
+ **DO NOT** instantiate this class directly.
+
+ Instead, you should access the following operations through
+ :class:`~azure.storage.fileshare.AzureFileStorage`'s
+ :attr:`share` attribute.
+ """
+
+ models = _models
+
+ def __init__(self, *args, **kwargs):
+ input_args = list(args)
+ self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
+ self._config: AzureFileStorageConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
+ self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
+ self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+ @distributed_trace
+ def create( # pylint: disable=inconsistent-return-statements
+ self,
+ timeout: Optional[int] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ quota: Optional[int] = None,
+ access_tier: Optional[Union[str, _models.ShareAccessTier]] = None,
+ enabled_protocols: Optional[str] = None,
+ root_squash: Optional[Union[str, _models.ShareRootSquash]] = None,
+ enable_snapshot_virtual_directory_access: Optional[bool] = None,
+ paid_bursting_enabled: Optional[bool] = None,
+ paid_bursting_max_bandwidth_mibps: Optional[int] = None,
+ paid_bursting_max_iops: Optional[int] = None,
+ share_provisioned_iops: Optional[int] = None,
+ share_provisioned_bandwidth_mibps: Optional[int] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Creates a new share under the specified account. If a share with the same name already
+ exists, the operation fails.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param metadata: A name-value pair to associate with a file storage object. Default value is
+ None.
+ :type metadata: dict[str, str]
+ :param quota: Specifies the maximum size of the share, in gigabytes. Default value is None.
+ :type quota: int
+ :param access_tier: Specifies the access tier of the share. Known values are:
+ "TransactionOptimized", "Hot", "Cool", and "Premium". Default value is None.
+ :type access_tier: str or ~azure.storage.fileshare.models.ShareAccessTier
+ :param enabled_protocols: Protocols to enable on the share. Default value is None.
+ :type enabled_protocols: str
+ :param root_squash: Root squash to set on the share. Only valid for NFS shares. Known values
+ are: "NoRootSquash", "RootSquash", and "AllSquash". Default value is None.
+ :type root_squash: str or ~azure.storage.fileshare.models.ShareRootSquash
+ :param enable_snapshot_virtual_directory_access: Default value is None.
+ :type enable_snapshot_virtual_directory_access: bool
+ :param paid_bursting_enabled: Optional. Boolean. Default if not specified is false. This
+ property enables paid bursting. Default value is None.
+ :type paid_bursting_enabled: bool
+ :param paid_bursting_max_bandwidth_mibps: Optional. Integer. Default if not specified is the
+ maximum throughput the file share can support. Current maximum for a file share is 10,340
+ MiB/sec. Default value is None.
+ :type paid_bursting_max_bandwidth_mibps: int
+ :param paid_bursting_max_iops: Optional. Integer. Default if not specified is the maximum IOPS
+ the file share can support. Current maximum for a file share is 102,400 IOPS. Default value is
+ None.
+ :type paid_bursting_max_iops: int
+ :param share_provisioned_iops: Optional. Supported in version 2025-01-05 and later. Only
+ allowed for provisioned v2 file shares. Specifies the provisioned number of input/output
+ operations per second (IOPS) of the share. If this is not specified, the provisioned IOPS is
+ set to a value calculated based on the recommendation formula. Default value is None.
+ :type share_provisioned_iops: int
+ :param share_provisioned_bandwidth_mibps: Optional. Supported in version 2025-01-05 and later.
+ Only allowed for provisioned v2 file shares. Specifies the provisioned bandwidth of the share,
+ in mebibytes per second (MiBps). If this is not specified, the provisioned bandwidth is set to
+ a value calculated based on the recommendation formula. Default value is None.
+ :type share_provisioned_bandwidth_mibps: int
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_create_request(
+ url=self._config.url,
+ timeout=timeout,
+ metadata=metadata,
+ quota=quota,
+ access_tier=access_tier,
+ enabled_protocols=enabled_protocols,
+ root_squash=root_squash,
+ enable_snapshot_virtual_directory_access=enable_snapshot_virtual_directory_access,
+ paid_bursting_enabled=paid_bursting_enabled,
+ paid_bursting_max_bandwidth_mibps=paid_bursting_max_bandwidth_mibps,
+ paid_bursting_max_iops=paid_bursting_max_iops,
+ share_provisioned_iops=share_provisioned_iops,
+ share_provisioned_bandwidth_mibps=share_provisioned_bandwidth_mibps,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-share-quota"] = self._deserialize("int", response.headers.get("x-ms-share-quota"))
+ response_headers["x-ms-share-provisioned-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-iops")
+ )
+ response_headers["x-ms-share-provisioned-bandwidth-mibps"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-bandwidth-mibps")
+ )
+ response_headers["x-ms-share-included-burst-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-included-burst-iops")
+ )
+ response_headers["x-ms-share-max-burst-credits-for-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-max-burst-credits-for-iops")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
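+    # Illustrative usage sketch (not part of the generated code). "share_ops" below is a
+    # hypothetical, already-configured instance of this operations class; the call creates a
+    # 100 GiB share tagged with metadata:
+    #
+    #     share_ops.create(metadata={"env": "test"}, quota=100, access_tier="Hot")
+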
+ @distributed_trace
+ def get_properties( # pylint: disable=inconsistent-return-statements
+ self,
+ sharesnapshot: Optional[str] = None,
+ timeout: Optional[int] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Returns all user-defined metadata and system properties for the specified share or share
+ snapshot. The data returned does not include the share's list of files.
+
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_get_properties_request(
+ url=self._config.url,
+ sharesnapshot=sharesnapshot,
+ timeout=timeout,
+ lease_id=_lease_id,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-meta"] = self._deserialize("{str}", response.headers.get("x-ms-meta"))
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-share-quota"] = self._deserialize("int", response.headers.get("x-ms-share-quota"))
+ response_headers["x-ms-share-provisioned-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-iops")
+ )
+ response_headers["x-ms-share-provisioned-ingress-mbps"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-ingress-mbps")
+ )
+ response_headers["x-ms-share-provisioned-egress-mbps"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-egress-mbps")
+ )
+ response_headers["x-ms-share-next-allowed-quota-downgrade-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-share-next-allowed-quota-downgrade-time")
+ )
+ response_headers["x-ms-share-provisioned-bandwidth-mibps"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-bandwidth-mibps")
+ )
+ response_headers["x-ms-lease-duration"] = self._deserialize("str", response.headers.get("x-ms-lease-duration"))
+ response_headers["x-ms-lease-state"] = self._deserialize("str", response.headers.get("x-ms-lease-state"))
+ response_headers["x-ms-lease-status"] = self._deserialize("str", response.headers.get("x-ms-lease-status"))
+ response_headers["x-ms-access-tier"] = self._deserialize("str", response.headers.get("x-ms-access-tier"))
+ response_headers["x-ms-access-tier-change-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-access-tier-change-time")
+ )
+ response_headers["x-ms-access-tier-transition-state"] = self._deserialize(
+ "str", response.headers.get("x-ms-access-tier-transition-state")
+ )
+ response_headers["x-ms-enabled-protocols"] = self._deserialize(
+ "str", response.headers.get("x-ms-enabled-protocols")
+ )
+ response_headers["x-ms-root-squash"] = self._deserialize("str", response.headers.get("x-ms-root-squash"))
+ response_headers["x-ms-enable-snapshot-virtual-directory-access"] = self._deserialize(
+ "bool", response.headers.get("x-ms-enable-snapshot-virtual-directory-access")
+ )
+ response_headers["x-ms-share-paid-bursting-enabled"] = self._deserialize(
+ "bool", response.headers.get("x-ms-share-paid-bursting-enabled")
+ )
+ response_headers["x-ms-share-paid-bursting-max-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-paid-bursting-max-iops")
+ )
+ response_headers["x-ms-share-paid-bursting-max-bandwidth-mibps"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-paid-bursting-max-bandwidth-mibps")
+ )
+ response_headers["x-ms-share-included-burst-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-included-burst-iops")
+ )
+ response_headers["x-ms-share-max-burst-credits-for-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-max-burst-credits-for-iops")
+ )
+ response_headers["x-ms-share-next-allowed-provisioned-iops-downgrade-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-share-next-allowed-provisioned-iops-downgrade-time")
+ )
+ response_headers["x-ms-share-next-allowed-provisioned-bandwidth-downgrade-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-share-next-allowed-provisioned-bandwidth-downgrade-time")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
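+    # Illustrative sketch: get_properties returns None, so callers typically read the share's
+    # system properties from the response headers via the optional "cls" callback (hypothetical
+    # names, assuming a configured "share_ops" instance):
+    #
+    #     captured = {}
+    #     share_ops.get_properties(cls=lambda resp, body, headers: captured.update(headers))
+    #     quota_gib = captured.get("x-ms-share-quota")
+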
+ @distributed_trace
+ def delete( # pylint: disable=inconsistent-return-statements
+ self,
+ sharesnapshot: Optional[str] = None,
+ timeout: Optional[int] = None,
+ delete_snapshots: Optional[Union[str, _models.DeleteSnapshotsOptionType]] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Operation marks the specified share or share snapshot for deletion. The share or share snapshot
+ and any files contained within it are later deleted during garbage collection.
+
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+        :param delete_snapshots: Specifies whether to delete the base share and all of its
+         snapshots. Known values are: "include" and "include-leased". Default value is None.
+ :type delete_snapshots: str or ~azure.storage.fileshare.models.DeleteSnapshotsOptionType
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_delete_request(
+ url=self._config.url,
+ sharesnapshot=sharesnapshot,
+ timeout=timeout,
+ delete_snapshots=delete_snapshots,
+ lease_id=_lease_id,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-file-share-usage-bytes"] = self._deserialize(
+ "int", response.headers.get("x-ms-file-share-usage-bytes")
+ )
+ response_headers["x-ms-file-share-snapshot-usage-bytes"] = self._deserialize(
+ "int", response.headers.get("x-ms-file-share-snapshot-usage-bytes")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
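+    # Illustrative sketch (hypothetical "share_ops" instance): delete the base share together
+    # with its snapshots by passing the "include" option:
+    #
+    #     share_ops.delete(delete_snapshots="include", timeout=30)
+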
+ @distributed_trace
+ def acquire_lease( # pylint: disable=inconsistent-return-statements
+ self,
+ timeout: Optional[int] = None,
+ duration: Optional[int] = None,
+ proposed_lease_id: Optional[str] = None,
+ sharesnapshot: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """The Lease Share operation establishes and manages a lock on a share, or the specified snapshot
+ for set and delete share operations.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param duration: Specifies the duration of the lease, in seconds, or negative one (-1) for a
+ lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease
+ duration cannot be changed using renew or change. Default value is None.
+ :type duration: int
+ :param proposed_lease_id: Proposed lease ID, in a GUID string format. The File service returns
+ 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid
+ Constructor (String) for a list of valid GUID string formats. Default value is None.
+ :type proposed_lease_id: str
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["acquire"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "acquire"))
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_acquire_lease_request(
+ url=self._config.url,
+ timeout=timeout,
+ duration=duration,
+ proposed_lease_id=proposed_lease_id,
+ sharesnapshot=sharesnapshot,
+ request_id_parameter=request_id_parameter,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ action=action,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
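+    # Illustrative sketch (hypothetical "share_ops" instance): acquire an infinite lease and
+    # capture the lease ID from the "x-ms-lease-id" response header via the "cls" callback:
+    #
+    #     headers = {}
+    #     share_ops.acquire_lease(duration=-1, cls=lambda resp, body, h: headers.update(h))
+    #     lease_id = headers["x-ms-lease-id"]
+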
+ @distributed_trace
+ def release_lease( # pylint: disable=inconsistent-return-statements
+ self,
+ lease_id: str,
+ timeout: Optional[int] = None,
+ sharesnapshot: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """The Lease Share operation establishes and manages a lock on a share, or the specified snapshot
+ for set and delete share operations.
+
+ :param lease_id: Specifies the current lease ID on the resource. Required.
+ :type lease_id: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["release"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "release"))
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_release_lease_request(
+ url=self._config.url,
+ lease_id=lease_id,
+ timeout=timeout,
+ sharesnapshot=sharesnapshot,
+ request_id_parameter=request_id_parameter,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ action=action,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
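+    # Illustrative sketch (hypothetical "share_ops" instance and "lease_id" from a prior
+    # acquire_lease call): release the active lease so other clients can modify the share:
+    #
+    #     share_ops.release_lease(lease_id=lease_id)
+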
+ @distributed_trace
+ def change_lease( # pylint: disable=inconsistent-return-statements
+ self,
+ lease_id: str,
+ timeout: Optional[int] = None,
+ proposed_lease_id: Optional[str] = None,
+ sharesnapshot: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """The Lease Share operation establishes and manages a lock on a share, or the specified snapshot
+ for set and delete share operations.
+
+ :param lease_id: Specifies the current lease ID on the resource. Required.
+ :type lease_id: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param proposed_lease_id: Proposed lease ID, in a GUID string format. The File service returns
+ 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid
+ Constructor (String) for a list of valid GUID string formats. Default value is None.
+ :type proposed_lease_id: str
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["change"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "change"))
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_change_lease_request(
+ url=self._config.url,
+ lease_id=lease_id,
+ timeout=timeout,
+ proposed_lease_id=proposed_lease_id,
+ sharesnapshot=sharesnapshot,
+ request_id_parameter=request_id_parameter,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ action=action,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
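+    # Illustrative sketch (hypothetical names): swap the active lease ID for a new,
+    # client-proposed GUID:
+    #
+    #     import uuid
+    #     share_ops.change_lease(lease_id=lease_id, proposed_lease_id=str(uuid.uuid4()))
+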
+ @distributed_trace
+ def renew_lease( # pylint: disable=inconsistent-return-statements
+ self,
+ lease_id: str,
+ timeout: Optional[int] = None,
+ sharesnapshot: Optional[str] = None,
+ request_id_parameter: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """The Lease Share operation establishes and manages a lock on a share, or the specified snapshot
+ for set and delete share operations.
+
+ :param lease_id: Specifies the current lease ID on the resource. Required.
+ :type lease_id: str
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["renew"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "renew"))
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_renew_lease_request(
+ url=self._config.url,
+ lease_id=lease_id,
+ timeout=timeout,
+ sharesnapshot=sharesnapshot,
+ request_id_parameter=request_id_parameter,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ action=action,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
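+    # Illustrative sketch (hypothetical names): renew a fixed-duration lease before it expires:
+    #
+    #     share_ops.renew_lease(lease_id=lease_id)
+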
+ @distributed_trace
+ def break_lease( # pylint: disable=inconsistent-return-statements
+ self,
+ timeout: Optional[int] = None,
+ break_period: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ sharesnapshot: Optional[str] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """The Lease Share operation establishes and manages a lock on a share, or the specified snapshot
+ for set and delete share operations.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param break_period: For a break operation, proposed duration the lease should continue before
+ it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter
+ than the time remaining on the lease. If longer, the time remaining on the lease is used. A new
+ lease will not be available before the break period has expired, but the lease may be held for
+ longer than the break period. If this header does not appear with a break operation, a
+ fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease
+ breaks immediately. Default value is None.
+ :type break_period: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :param sharesnapshot: The snapshot parameter is an opaque DateTime value that, when present,
+ specifies the share snapshot to query. Default value is None.
+ :type sharesnapshot: str
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ comp: Literal["lease"] = kwargs.pop("comp", _params.pop("comp", "lease"))
+ action: Literal["break"] = kwargs.pop("action", _headers.pop("x-ms-lease-action", "break"))
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_break_lease_request(
+ url=self._config.url,
+ timeout=timeout,
+ break_period=break_period,
+ lease_id=_lease_id,
+ request_id_parameter=request_id_parameter,
+ sharesnapshot=sharesnapshot,
+ file_request_intent=self._config.file_request_intent,
+ comp=comp,
+ action=action,
+ restype=restype,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-lease-time"] = self._deserialize("int", response.headers.get("x-ms-lease-time"))
+ response_headers["x-ms-lease-id"] = self._deserialize("str", response.headers.get("x-ms-lease-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
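+    # Illustrative sketch (hypothetical "share_ops" instance): break the current lease without
+    # knowing its ID, allowing a new lease after at most 10 seconds:
+    #
+    #     share_ops.break_lease(break_period=10)
+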
+ @distributed_trace
+ def create_snapshot( # pylint: disable=inconsistent-return-statements
+ self, timeout: Optional[int] = None, metadata: Optional[Dict[str, str]] = None, **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Creates a read-only snapshot of a share.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param metadata: A name-value pair to associate with a file storage object. Default value is
+ None.
+ :type metadata: dict[str, str]
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["snapshot"] = kwargs.pop("comp", _params.pop("comp", "snapshot"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_create_snapshot_request(
+ url=self._config.url,
+ timeout=timeout,
+ metadata=metadata,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-snapshot"] = self._deserialize("str", response.headers.get("x-ms-snapshot"))
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
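+    # Illustrative sketch (hypothetical names): create a snapshot and capture its opaque
+    # DateTime identifier from the "x-ms-snapshot" response header:
+    #
+    #     headers = {}
+    #     share_ops.create_snapshot(metadata={"reason": "backup"},
+    #                               cls=lambda resp, body, h: headers.update(h))
+    #     snapshot_id = headers["x-ms-snapshot"]
+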
+ @overload
+ def create_permission(
+ self,
+ share_permission: _models.SharePermission,
+ timeout: Optional[int] = None,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Create a permission (a security descriptor).
+
+ :param share_permission: A permission (a security descriptor) at the share level. Required.
+ :type share_permission: ~azure.storage.fileshare.models.SharePermission
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @overload
+ def create_permission(
+ self,
+ share_permission: IO[bytes],
+ timeout: Optional[int] = None,
+ *,
+ content_type: str = "application/json",
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Create a permission (a security descriptor).
+
+ :param share_permission: A permission (a security descriptor) at the share level. Required.
+ :type share_permission: IO[bytes]
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+ Default value is "application/json".
+ :paramtype content_type: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace
+ def create_permission( # pylint: disable=inconsistent-return-statements
+ self, share_permission: Union[_models.SharePermission, IO[bytes]], timeout: Optional[int] = None, **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Create a permission (a security descriptor).
+
+        :param share_permission: A permission (a security descriptor) at the share level. Is either
+         a SharePermission type or an IO[bytes] type. Required.
+ :type share_permission: ~azure.storage.fileshare.models.SharePermission or IO[bytes]
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["filepermission"] = kwargs.pop("comp", _params.pop("comp", "filepermission"))
+ content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ content_type = content_type or "application/json"
+ _json = None
+ _content = None
+ if isinstance(share_permission, (IOBase, bytes)):
+ _content = share_permission
+ else:
+ _json = self._serialize.body(share_permission, "SharePermission")
+
+ _request = build_create_permission_request(
+ url=self._config.url,
+ timeout=timeout,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ content_type=content_type,
+ version=self._config.version,
+ json=_json,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-file-permission-key"] = self._deserialize(
+ "str", response.headers.get("x-ms-file-permission-key")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
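+    # Illustrative sketch (hypothetical names): upload an SDDL security descriptor and capture
+    # the key returned in "x-ms-file-permission-key", which can later be applied to files or
+    # directories. The SDDL string below is only an example value:
+    #
+    #     perm = _models.SharePermission(permission="O:SYG:SYD:(A;;FA;;;SY)")
+    #     headers = {}
+    #     share_ops.create_permission(perm, cls=lambda resp, body, h: headers.update(h))
+    #     permission_key = headers["x-ms-file-permission-key"]
+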
+ @distributed_trace
+ def get_permission(
+ self,
+ file_permission_key: str,
+ file_permission_format: Optional[Union[str, _models.FilePermissionFormat]] = None,
+ timeout: Optional[int] = None,
+ **kwargs: Any
+ ) -> _models.SharePermission:
+ # pylint: disable=line-too-long
+ """Returns the permission (security descriptor) for a given key.
+
+ :param file_permission_key: Key of the permission to be set for the directory/file. Required.
+ :type file_permission_key: str
+ :param file_permission_format: Optional. Available for version 2023-06-01 and later. Specifies
+ the format in which the permission is returned. Acceptable values are SDDL or binary. If
+ x-ms-file-permission-format is unspecified or explicitly set to SDDL, the permission is
+ returned in SDDL format. If x-ms-file-permission-format is explicitly set to binary, the
+ permission is returned as a base64 string representing the binary encoding of the permission.
+ Known values are: "Sddl" and "Binary". Default value is None.
+ :type file_permission_format: str or ~azure.storage.fileshare.models.FilePermissionFormat
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :return: SharePermission or the result of cls(response)
+ :rtype: ~azure.storage.fileshare.models.SharePermission
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["filepermission"] = kwargs.pop("comp", _params.pop("comp", "filepermission"))
+ cls: ClsType[_models.SharePermission] = kwargs.pop("cls", None)
+
+ _request = build_get_permission_request(
+ url=self._config.url,
+ file_permission_key=file_permission_key,
+ file_permission_format=file_permission_format,
+ timeout=timeout,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ deserialized = self._deserialize("SharePermission", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
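+    # Illustrative sketch (hypothetical names, using "permission_key" from the create_permission
+    # sketch above): round-trip the descriptor back in SDDL form:
+    #
+    #     perm = share_ops.get_permission(file_permission_key=permission_key,
+    #                                     file_permission_format="Sddl")
+    #     sddl = perm.permission
+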
+ @distributed_trace
+ def set_properties( # pylint: disable=inconsistent-return-statements
+ self,
+ timeout: Optional[int] = None,
+ quota: Optional[int] = None,
+ access_tier: Optional[Union[str, _models.ShareAccessTier]] = None,
+ root_squash: Optional[Union[str, _models.ShareRootSquash]] = None,
+ enable_snapshot_virtual_directory_access: Optional[bool] = None,
+ paid_bursting_enabled: Optional[bool] = None,
+ paid_bursting_max_bandwidth_mibps: Optional[int] = None,
+ paid_bursting_max_iops: Optional[int] = None,
+ share_provisioned_iops: Optional[int] = None,
+ share_provisioned_bandwidth_mibps: Optional[int] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Sets properties for the specified share.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param quota: Specifies the maximum size of the share, in gigabytes. Default value is None.
+ :type quota: int
+ :param access_tier: Specifies the access tier of the share. Known values are:
+ "TransactionOptimized", "Hot", "Cool", and "Premium". Default value is None.
+ :type access_tier: str or ~azure.storage.fileshare.models.ShareAccessTier
+ :param root_squash: Root squash to set on the share. Only valid for NFS shares. Known values
+ are: "NoRootSquash", "RootSquash", and "AllSquash". Default value is None.
+ :type root_squash: str or ~azure.storage.fileshare.models.ShareRootSquash
+        :param enable_snapshot_virtual_directory_access: Optional. Specifies whether the snapshot
+         virtual directory should be accessible at the root of the share mount point when NFS is
+         enabled. Default value is None.
+ :type enable_snapshot_virtual_directory_access: bool
+ :param paid_bursting_enabled: Optional. Boolean. Default if not specified is false. This
+ property enables paid bursting. Default value is None.
+ :type paid_bursting_enabled: bool
+ :param paid_bursting_max_bandwidth_mibps: Optional. Integer. Default if not specified is the
+ maximum throughput the file share can support. Current maximum for a file share is 10,340
+ MiB/sec. Default value is None.
+ :type paid_bursting_max_bandwidth_mibps: int
+ :param paid_bursting_max_iops: Optional. Integer. Default if not specified is the maximum IOPS
+ the file share can support. Current maximum for a file share is 102,400 IOPS. Default value is
+ None.
+ :type paid_bursting_max_iops: int
+ :param share_provisioned_iops: Optional. Supported in version 2025-01-05 and later. Only
+ allowed for provisioned v2 file shares. Specifies the provisioned number of input/output
+         operations per second (IOPS) of the share. If this is not specified, the provisioned IOPS
+         is set to a value calculated based on the recommendation formula. Default value is None.
+ :type share_provisioned_iops: int
+ :param share_provisioned_bandwidth_mibps: Optional. Supported in version 2025-01-05 and later.
+ Only allowed for provisioned v2 file shares. Specifies the provisioned bandwidth of the share,
+         in mebibytes per second (MiBps). If this is not specified, the provisioned bandwidth is
+         set to a value calculated based on the recommendation formula. Default value is None.
+ :type share_provisioned_bandwidth_mibps: int
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["properties"] = kwargs.pop("comp", _params.pop("comp", "properties"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_set_properties_request(
+ url=self._config.url,
+ timeout=timeout,
+ quota=quota,
+ access_tier=access_tier,
+ lease_id=_lease_id,
+ root_squash=root_squash,
+ enable_snapshot_virtual_directory_access=enable_snapshot_virtual_directory_access,
+ paid_bursting_enabled=paid_bursting_enabled,
+ paid_bursting_max_bandwidth_mibps=paid_bursting_max_bandwidth_mibps,
+ paid_bursting_max_iops=paid_bursting_max_iops,
+ share_provisioned_iops=share_provisioned_iops,
+ share_provisioned_bandwidth_mibps=share_provisioned_bandwidth_mibps,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-share-quota"] = self._deserialize("int", response.headers.get("x-ms-share-quota"))
+ response_headers["x-ms-share-provisioned-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-iops")
+ )
+ response_headers["x-ms-share-provisioned-bandwidth-mibps"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-bandwidth-mibps")
+ )
+ response_headers["x-ms-share-included-burst-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-included-burst-iops")
+ )
+ response_headers["x-ms-share-max-burst-credits-for-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-max-burst-credits-for-iops")
+ )
+ response_headers["x-ms-share-next-allowed-quota-downgrade-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-share-next-allowed-quota-downgrade-time")
+ )
+ response_headers["x-ms-share-next-allowed-provisioned-iops-downgrade-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-share-next-allowed-provisioned-iops-downgrade-time")
+ )
+ response_headers["x-ms-share-next-allowed-provisioned-bandwidth-downgrade-time"] = self._deserialize(
+ "rfc-1123", response.headers.get("x-ms-share-next-allowed-provisioned-bandwidth-downgrade-time")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
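+    # Illustrative sketch (hypothetical "share_ops" instance): grow the share and move it to the
+    # Cool access tier in a single call:
+    #
+    #     share_ops.set_properties(quota=200, access_tier="Cool")
+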
+ @distributed_trace
+ def set_metadata( # pylint: disable=inconsistent-return-statements
+ self,
+ timeout: Optional[int] = None,
+ metadata: Optional[Dict[str, str]] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Sets one or more user-defined name-value pairs for the specified share.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param metadata: A name-value pair to associate with a file storage object. Default value is
+ None.
+ :type metadata: dict[str, str]
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["metadata"] = kwargs.pop("comp", _params.pop("comp", "metadata"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_set_metadata_request(
+ url=self._config.url,
+ timeout=timeout,
+ metadata=metadata,
+ lease_id=_lease_id,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
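+    # Illustrative sketch (hypothetical "share_ops" instance): replace the share's user-defined
+    # metadata (the operation overwrites any existing name-value pairs):
+    #
+    #     share_ops.set_metadata(metadata={"owner": "data-team", "env": "test"})
+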
+ @distributed_trace
+ def get_access_policy(
+ self,
+ timeout: Optional[int] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> List[_models.SignedIdentifier]:
+ # pylint: disable=line-too-long
+ """Returns information about stored access policies specified on the share.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: list of SignedIdentifier or the result of cls(response)
+ :rtype: list[~azure.storage.fileshare.models.SignedIdentifier]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl"))
+ cls: ClsType[List[_models.SignedIdentifier]] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_get_access_policy_request(
+ url=self._config.url,
+ timeout=timeout,
+ lease_id=_lease_id,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ deserialized = self._deserialize("[SignedIdentifier]", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
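+    # A hedged sketch of consuming this operation's result: per the docstring it
+    # returns a list of SignedIdentifier models, each pairing a policy id with
+    # its AccessPolicy. Here ``share_operations`` stands for an instance of this
+    # operations class obtained from the generated client (name assumed for
+    # illustration only):
+    #
+    #     identifiers = share_operations.get_access_policy(timeout=30)
+    #     for identifier in identifiers:
+    #         print(identifier.id, identifier.access_policy)
+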
+ @distributed_trace
+ def set_access_policy( # pylint: disable=inconsistent-return-statements
+ self,
+ timeout: Optional[int] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ share_acl: Optional[List[_models.SignedIdentifier]] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+ """Sets a stored access policy for use with shared access signatures.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :param share_acl: The ACL for the share. Default value is None.
+ :type share_acl: list[~azure.storage.fileshare.models.SignedIdentifier]
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["acl"] = kwargs.pop("comp", _params.pop("comp", "acl"))
+ content_type: str = kwargs.pop("content_type", _headers.pop("Content-Type", "application/xml"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+ serialization_ctxt = {"xml": {"name": "SignedIdentifiers", "wrapped": True}}
+ if share_acl is not None:
+ _content = self._serialize.body(
+ share_acl, "[SignedIdentifier]", is_xml=True, serialization_ctxt=serialization_ctxt
+ )
+ else:
+ _content = None
+
+ _request = build_set_access_policy_request(
+ url=self._config.url,
+ timeout=timeout,
+ lease_id=_lease_id,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ content_type=content_type,
+ version=self._config.version,
+ content=_content,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
+
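+    # A hedged sketch of building the share_acl payload, assuming the generated
+    # SignedIdentifier and AccessPolicy models in this package expose id,
+    # access_policy, start, expiry and permission fields (the policy id, dates
+    # and permission string below are illustrative):
+    #
+    #     from azure.storage.fileshare._generated import models as _m
+    #
+    #     policy = _m.AccessPolicy(start="2024-01-01T00:00:00Z",
+    #                              expiry="2024-02-01T00:00:00Z",
+    #                              permission="rcwdl")
+    #     share_operations.set_access_policy(
+    #         share_acl=[_m.SignedIdentifier(id="read-write-policy", access_policy=policy)]
+    #     )
+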
+ @distributed_trace
+ def get_statistics(
+ self,
+ timeout: Optional[int] = None,
+ lease_access_conditions: Optional[_models.LeaseAccessConditions] = None,
+ **kwargs: Any
+ ) -> _models.ShareStats:
+ # pylint: disable=line-too-long
+ """Retrieves statistics related to the share.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param lease_access_conditions: Parameter group. Default value is None.
+ :type lease_access_conditions: ~azure.storage.fileshare.models.LeaseAccessConditions
+ :return: ShareStats or the result of cls(response)
+ :rtype: ~azure.storage.fileshare.models.ShareStats
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["stats"] = kwargs.pop("comp", _params.pop("comp", "stats"))
+ cls: ClsType[_models.ShareStats] = kwargs.pop("cls", None)
+
+ _lease_id = None
+ if lease_access_conditions is not None:
+ _lease_id = lease_access_conditions.lease_id
+
+ _request = build_get_statistics_request(
+ url=self._config.url,
+ timeout=timeout,
+ lease_id=_lease_id,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+
+ deserialized = self._deserialize("ShareStats", pipeline_response.http_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers) # type: ignore
+
+ return deserialized # type: ignore
+
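+    # A short usage sketch: the returned ShareStats model reports the share's
+    # approximate usage in bytes (the share_usage_bytes attribute name is taken
+    # from this package's generated models and assumed here for illustration):
+    #
+    #     stats = share_operations.get_statistics()
+    #     print(f"approximate usage: {stats.share_usage_bytes} bytes")
+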
+ @distributed_trace
+ def restore( # pylint: disable=inconsistent-return-statements
+ self,
+ timeout: Optional[int] = None,
+ request_id_parameter: Optional[str] = None,
+ deleted_share_name: Optional[str] = None,
+ deleted_share_version: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ # pylint: disable=line-too-long
+        """Restores a previously deleted share.
+
+ :param timeout: The timeout parameter is expressed in seconds. For more information, see
+ :code:`<a
+ href="https://docs.microsoft.com/en-us/rest/api/storageservices/Setting-Timeouts-for-File-Service-Operations?redirectedfrom=MSDN">Setting
+ Timeouts for File Service Operations.</a>`. Default value is None.
+ :type timeout: int
+ :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+ limit that is recorded in the analytics logs when storage analytics logging is enabled. Default
+ value is None.
+ :type request_id_parameter: str
+ :param deleted_share_name: Specifies the name of the previously-deleted share. Default value is
+ None.
+ :type deleted_share_name: str
+ :param deleted_share_version: Specifies the version of the previously-deleted share. Default
+ value is None.
+ :type deleted_share_version: str
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ error_map: MutableMapping = {
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
+ }
+ error_map.update(kwargs.pop("error_map", {}) or {})
+
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ restype: Literal["share"] = kwargs.pop("restype", _params.pop("restype", "share"))
+ comp: Literal["undelete"] = kwargs.pop("comp", _params.pop("comp", "undelete"))
+ cls: ClsType[None] = kwargs.pop("cls", None)
+
+ _request = build_restore_request(
+ url=self._config.url,
+ timeout=timeout,
+ request_id_parameter=request_id_parameter,
+ deleted_share_name=deleted_share_name,
+ deleted_share_version=deleted_share_version,
+ file_request_intent=self._config.file_request_intent,
+ restype=restype,
+ comp=comp,
+ version=self._config.version,
+ headers=_headers,
+ params=_params,
+ )
+ _request.url = self._client.format_url(_request.url)
+
+ _stream = False
+ pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
+ _request, stream=_stream, **kwargs
+ )
+
+ response = pipeline_response.http_response
+
+ if response.status_code not in [201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.StorageError, pipeline_response)
+ raise HttpResponseError(response=response, model=error)
+
+ response_headers = {}
+ response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
+ response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
+ response_headers["x-ms-request-id"] = self._deserialize("str", response.headers.get("x-ms-request-id"))
+ response_headers["x-ms-client-request-id"] = self._deserialize(
+ "str", response.headers.get("x-ms-client-request-id")
+ )
+ response_headers["x-ms-version"] = self._deserialize("str", response.headers.get("x-ms-version"))
+ response_headers["Date"] = self._deserialize("rfc-1123", response.headers.get("Date"))
+ response_headers["x-ms-share-quota"] = self._deserialize("int", response.headers.get("x-ms-share-quota"))
+ response_headers["x-ms-share-provisioned-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-iops")
+ )
+ response_headers["x-ms-share-provisioned-bandwidth-mibps"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-provisioned-bandwidth-mibps")
+ )
+ response_headers["x-ms-share-included-burst-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-included-burst-iops")
+ )
+ response_headers["x-ms-share-max-burst-credits-for-iops"] = self._deserialize(
+ "int", response.headers.get("x-ms-share-max-burst-credits-for-iops")
+ )
+
+ if cls:
+ return cls(pipeline_response, None, response_headers) # type: ignore
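+    # Undelete is usually driven from the account level. A hedged caller-side
+    # sketch, assuming the public ShareServiceClient.undelete_share wrapper; the
+    # deleted-share name and version are placeholders and would normally come
+    # from listing deleted shares on the service:
+    #
+    #     from azure.storage.fileshare import ShareServiceClient
+    #
+    #     service = ShareServiceClient.from_connection_string(conn_str)
+    #     restored = service.undelete_share("myshare", "01D64EB9886F00C4")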
diff --git a/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/py.typed b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/py.typed
new file mode 100644
index 00000000..e5aff4f8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/azure/storage/fileshare/_generated/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561. \ No newline at end of file