Diffstat (limited to '.venv/lib/python3.12/site-packages/msrest')
26 files changed, 6365 insertions, 0 deletions
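For orientation, a minimal usage sketch of the public pieces this diff vendors in; every value below (base URL, user-agent fragment, header name, key) is a placeholder, and the classes and methods referenced are the ones defined in msrest/__init__.py, msrest/configuration.py, and msrest/authentication.py further down.

    from msrest import Configuration
    from msrest.authentication import BasicAuthentication, ApiKeyCredentials

    # Configuration carries the base URL plus the user-agent and HTTP-logger policies.
    config = Configuration("https://example.invalid")   # placeholder base URL
    config.add_user_agent("my-client/0.1")               # placeholder user-agent fragment

    # Every credential class exposes signed_session(), which returns a requests.Session
    # with the appropriate authentication applied.
    basic_session = BasicAuthentication("username", "password").signed_session()
    key_session = ApiKeyCredentials(in_headers={"x-api-key": "placeholder"}).signed_session()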
diff --git a/.venv/lib/python3.12/site-packages/msrest/__init__.py b/.venv/lib/python3.12/site-packages/msrest/__init__.py new file mode 100644 index 00000000..efcc2710 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/__init__.py @@ -0,0 +1,40 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +from .version import msrest_version +from .configuration import Configuration +from .service_client import ServiceClient, SDKClient +from .serialization import Serializer, Deserializer + +__all__ = [ + "ServiceClient", + "SDKClient", + "Serializer", + "Deserializer", + "Configuration" + ] + +__version__ = msrest_version diff --git a/.venv/lib/python3.12/site-packages/msrest/async_client.py b/.venv/lib/python3.12/site-packages/msrest/async_client.py new file mode 100644 index 00000000..e158cfee --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/async_client.py @@ -0,0 +1,127 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+# +# -------------------------------------------------------------------------- + +import functools +import logging + +from typing import Any, Dict, List, Union, TYPE_CHECKING + +from .universal_http import ClientRequest +from .universal_http.async_requests import AsyncRequestsHTTPSender +from .pipeline import Request, AsyncPipeline, AsyncHTTPPolicy, SansIOHTTPPolicy +from .pipeline.async_requests import ( + AsyncPipelineRequestsHTTPSender, + AsyncRequestsCredentialsPolicy +) +from .pipeline.universal import ( + HTTPLogger, + RawDeserializer, +) + +from .service_client import _ServiceClientCore + +if TYPE_CHECKING: + from .configuration import Configuration # pylint: disable=unused-import + +_LOGGER = logging.getLogger(__name__) + + +class SDKClientAsync: + """The base class of all generated SDK async client. + """ + + def __init__(self, config: 'Configuration') -> None: + self._client = ServiceClientAsync(config) + + async def __aenter__(self): + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details): + await self._client.__aexit__(*exc_details) + + +class ServiceClientAsync(_ServiceClientCore): + + def __init__(self, config: 'Configuration') -> None: + super(ServiceClientAsync, self).__init__(config) + + self.config.pipeline = self._create_default_pipeline() # type: ignore + + def _create_default_pipeline(self): + creds = self.config.credentials + + policies = [ + self.config.user_agent_policy, # UserAgent policy + RawDeserializer(), # Deserialize the raw bytes + self.config.http_logger_policy # HTTP request/response log + ] # type: List[Union[AsyncHTTPPolicy, SansIOHTTPPolicy]] + if creds: + if isinstance(creds, (AsyncHTTPPolicy, SansIOHTTPPolicy)): + policies.insert(1, creds) + else: + # Assume this is the old credentials class, and then requests. Wrap it. + policies.insert(1, AsyncRequestsCredentialsPolicy(creds)) + + return AsyncPipeline( + policies, + AsyncPipelineRequestsHTTPSender( + AsyncRequestsHTTPSender(self.config) # Send HTTP request using requests + ) + ) + + async def __aenter__(self): + await self.config.pipeline.__aenter__() + return self + + async def __aexit__(self, *exc_details): + await self.config.pipeline.__aexit__(*exc_details) + + async def async_send(self, request, **kwargs): + """Prepare and send request object according to configuration. + + :param ClientRequest request: The request object to be sent. + :param dict headers: Any headers to add to the request. + :param content: Any body data to add to the request. + :param config: Any specific config overrides + """ + kwargs.setdefault('stream', True) + # In the current backward compatible implementation, return the HTTP response + # and plug context inside. Could be remove if we modify Autorest, + # but we still need it to be backward compatible + pipeline_response = await self.config.pipeline.run(request, **kwargs) + response = pipeline_response.http_response + response.context = pipeline_response.context + return response + + def stream_download_async(self, response, user_callback): + """Async Generator for streaming request body data. + + :param response: The initial response + :param user_callback: Custom callback for monitoring progress. 
+ """ + block = self.config.connection.data_block_size + return response.stream_download(block, user_callback) diff --git a/.venv/lib/python3.12/site-packages/msrest/async_paging.py b/.venv/lib/python3.12/site-packages/msrest/async_paging.py new file mode 100644 index 00000000..cf498a0d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/async_paging.py @@ -0,0 +1,79 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +from collections.abc import AsyncIterator +import logging + +_LOGGER = logging.getLogger(__name__) + +class AsyncPagedMixin(AsyncIterator): + + def __init__(self, *args, **kwargs): + """Bring async to Paging. + + "async_command" is mandatory keyword argument for this mixin to work. + """ + self._async_get_next = kwargs.get("async_command") + if not self._async_get_next: + _LOGGER.debug("Paging async iterator protocol is not available for %s", + self.__class__.__name__) + + async def async_get(self, url): + """Get an arbitrary page. + + This resets the iterator and then fully consumes it to return the + specific page **only**. + + :param str url: URL to arbitrary page results. + """ + self.reset() + self.next_link = url + return await self.async_advance_page() + + async def async_advance_page(self): + if not self._async_get_next: + raise NotImplementedError( + "The class %s does not support async paging at the moment.", + self.__class__.__name__ + ) + if self.next_link is None: + raise StopAsyncIteration("End of paging") + self._current_page_iter_index = 0 + self._response = await self._async_get_next(self.next_link) + self._derserializer(self, self._response) + return self.current_page + + async def __anext__(self): + """Iterate through responses.""" + # Storing the list iterator might work out better, but there's no + # guarantee that some code won't replace the list entirely with a copy, + # invalidating an list iterator that might be saved between iterations. 
+ if self.current_page and self._current_page_iter_index < len(self.current_page): + response = self.current_page[self._current_page_iter_index] + self._current_page_iter_index += 1 + return response + else: + await self.async_advance_page() + return await self.__anext__() diff --git a/.venv/lib/python3.12/site-packages/msrest/authentication.py b/.venv/lib/python3.12/site-packages/msrest/authentication.py new file mode 100644 index 00000000..d7d7cad2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/authentication.py @@ -0,0 +1,306 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +from typing import Optional, Dict + +import requests +from requests.auth import HTTPBasicAuth +import requests_oauthlib as oauth + + +class Authentication(object): + """Default, simple auth object. + Doesn't actually add any auth headers. + """ + + header = "Authorization" + + def signed_session(self, session=None): + # type: (Optional[requests.Session]) -> requests.Session + """Create requests session with any required auth headers applied. + + If a session object is provided, configure it directly. Otherwise, + create a new session and return it. + + :param session: The session to configure for authentication + :type session: requests.Session + :rtype: requests.Session + """ + return session or requests.Session() + + +class BasicAuthentication(Authentication): + """Implementation of Basic Authentication. + + :param str username: Authentication username. + :param str password: Authentication password. + """ + + def __init__(self, username, password): + # type: (str, str) -> None + self.scheme = 'Basic' + self.username = username + self.password = password + + def signed_session(self, session=None): + # type: (Optional[requests.Session]) -> requests.Session + """Create requests session with any required auth headers + applied. + + If a session object is provided, configure it directly. Otherwise, + create a new session and return it. 
+ + :param session: The session to configure for authentication + :type session: requests.Session + :rtype: requests.Session + """ + session = super(BasicAuthentication, self).signed_session(session) + session.auth = HTTPBasicAuth(self.username, self.password) + return session + + +class BasicTokenAuthentication(Authentication): + """Simple Token Authentication. + Does not adhere to OAuth, simply adds provided token as a header. + + :param dict[str,str] token: Authentication token, must have 'access_token' key. + """ + + def __init__(self, token): + # type: (Dict[str, str]) -> None + self.scheme = 'Bearer' + self.token = token + + def set_token(self): + # type: () -> None + """Should be used to define the self.token attribute. + + In this implementation, does nothing since the token is statically provided + at creation. + """ + pass + + def signed_session(self, session=None): + # type: (Optional[requests.Session]) -> requests.Session + """Create requests session with any required auth headers + applied. + + If a session object is provided, configure it directly. Otherwise, + create a new session and return it. + + :param session: The session to configure for authentication + :type session: requests.Session + :rtype: requests.Session + """ + session = super(BasicTokenAuthentication, self).signed_session(session) + header = "{} {}".format(self.scheme, self.token['access_token']) + session.headers['Authorization'] = header + return session + + +class OAuthTokenAuthentication(BasicTokenAuthentication): + """OAuth Token Authentication. + + Requires that supplied token contains an expires_in field. + + :param str client_id: Account Client ID. + :param dict[str,str] token: OAuth2 token. + """ + + def __init__(self, client_id, token): + # type: (str, Dict[str, str]) -> None + super(OAuthTokenAuthentication, self).__init__(token) + self.id = client_id + self.store_key = self.id + + def construct_auth(self): + # type: () -> str + """Format token header. + + :rtype: str + """ + return "{} {}".format(self.scheme, self.token) + + def refresh_session(self, session=None): + # type: (Optional[requests.Session]) -> requests.Session + """Return updated session if token has expired, attempts to + refresh using refresh token. + + If a session object is provided, configure it directly. Otherwise, + create a new session and return it. + + :param session: The session to configure for authentication + :type session: requests.Session + :rtype: requests.Session + """ + return self.signed_session(session) + + def signed_session(self, session=None): + # type: (Optional[requests.Session]) -> requests.Session + """Create requests session with any required auth headers applied. + + If a session object is provided, configure it directly. Otherwise, + create a new session and return it. + + :param session: The session to configure for authentication + :type session: requests.Session + :rtype: requests.Session + """ + session = session or requests.Session() # Don't call super on purpose, let's "auth" manage the headers. + session.auth = oauth.OAuth2(self.id, token=self.token) + return session + + +class KerberosAuthentication(Authentication): + """Kerberos Authentication + Kerberos Single Sign On (SSO); requires requests_kerberos is installed. + + :param mutual_authentication: whether to require mutual authentication. 
Use values from requests_kerberos import REQUIRED, OPTIONAL, or DISABLED + """ + def __init__(self, mutual_authentication=None): + super(KerberosAuthentication, self).__init__() + self.mutual_authentication = mutual_authentication + + def signed_session(self, session=None): + """Create requests session with Negotiate (SPNEGO) headers applied. + + If a session object is provided, configure it directly. Otherwise, + create a new session and return it. + + :param session: The session to configure for authentication + :type session: requests.Session + :rtype: requests.Session + """ + session = super(KerberosAuthentication, self).signed_session(session) + try: + from requests_kerberos import HTTPKerberosAuth + except ImportError: + raise ImportError("In order to use KerberosAuthentication please do 'pip install requests_kerberos' first") + if self.mutual_authentication: + session.auth = HTTPKerberosAuth(mutual_authentication=self.mutual_authentication) + else: + session.auth = HTTPKerberosAuth() + return session + + +class ApiKeyCredentials(Authentication): + """Represent the ApiKey feature of Swagger. + + Dict should be dict[str,str] to be accepted by requests. + + :param dict[str,str] in_headers: Headers part of the ApiKey + :param dict[str,str] in_query: ApiKey in the query as parameters + """ + def __init__(self, in_headers=None, in_query=None): + # type: (Optional[Dict[str, str]], Optional[Dict[str, str]]) -> None + super(ApiKeyCredentials, self).__init__() + if in_headers is None: + in_headers = {} + if in_query is None: + in_query = {} + + if not in_headers and not in_query: + raise ValueError("You need to define in_headers or in_query") + + self.in_headers = in_headers + self.in_query = in_query + + def signed_session(self, session=None): + # type: (Optional[requests.Session]) -> requests.Session + """Create requests session with ApiKey. + + If a session object is provided, configure it directly. Otherwise, + create a new session and return it. + + :param session: The session to configure for authentication + :type session: requests.Session + :rtype: requests.Session + """ + session = super(ApiKeyCredentials, self).signed_session(session) + session.headers.update(self.in_headers) + try: + # params is actually Union[bytes, MutableMapping[Text, Text]] + session.params.update(self.in_query) # type: ignore + except AttributeError: # requests.params can be bytes + raise ValueError("session.params must be a dict to be used in ApiKeyCredentials") + return session + + +class CognitiveServicesCredentials(ApiKeyCredentials): + """Cognitive Services authentication. + + :param str subscription_key: The CS subscription key + """ + + _subscription_key_header = 'Ocp-Apim-Subscription-Key' + + def __init__(self, subscription_key): + # type: (str) -> None + if not subscription_key: + raise ValueError("Subscription key cannot be None") + super(CognitiveServicesCredentials, self).__init__( + in_headers={ + self._subscription_key_header: subscription_key, + 'X-BingApis-SDK-Client': 'Python-SDK' + } + ) + + +class TopicCredentials(ApiKeyCredentials): + """Event Grid authentication. + + :param str topic_key: The Event Grid topic key + """ + + _topic_key_header = 'aeg-sas-key' + + def __init__(self, topic_key): + # type: (str) -> None + if not topic_key: + raise ValueError("Topic key cannot be None") + super(TopicCredentials, self).__init__( + in_headers={ + self._topic_key_header: topic_key, + } + ) + + +class DomainCredentials(ApiKeyCredentials): + """Event Grid domain authentication. 
+ + :param str domain_key: The Event Grid domain key + """ + + _domain_key_header = 'aeg-sas-key' + + def __init__(self, domain_key): + # type: (str) -> None + if not domain_key: + raise ValueError("Domain key cannot be None") + super(DomainCredentials, self).__init__( + in_headers={ + self._domain_key_header: domain_key, + } + ) diff --git a/.venv/lib/python3.12/site-packages/msrest/configuration.py b/.venv/lib/python3.12/site-packages/msrest/configuration.py new file mode 100644 index 00000000..5133c7d9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/configuration.py @@ -0,0 +1,103 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + + +try: + import configparser + from configparser import NoOptionError +except ImportError: + import ConfigParser as configparser # type: ignore + from ConfigParser import NoOptionError # type: ignore + +from typing import TYPE_CHECKING, Optional, Dict, List, Any, Callable, Union # pylint: disable=unused-import + +from .pipeline import Pipeline +from .universal_http.requests import ( + RequestHTTPSenderConfiguration +) +from .pipeline.universal import ( + UserAgentPolicy, + HTTPLogger, +) + +if TYPE_CHECKING: + from .pipeline import AsyncPipeline + +class Configuration(RequestHTTPSenderConfiguration): + """Client configuration. + + :param str baseurl: REST API base URL. + :param str filepath: Path to existing config file (optional). + """ + + def __init__(self, base_url, filepath=None): + # type: (str, Optional[str]) -> None + + super(Configuration, self).__init__(filepath) + # Service + self.base_url = base_url + + # User-Agent as a policy + self.user_agent_policy = UserAgentPolicy() + + # HTTP logger policy + self.http_logger_policy = HTTPLogger() + + # The pipeline. We don't know until a ServiceClient use this configuration if it will be sync or async + # We instantiate with a default empty Pipeline for mypy mostly, trying to use a pipeline from a pure + # configuration object doesn't make sense. 
+ self.pipeline = Pipeline() # type: Union[Pipeline, AsyncPipeline] + + # If set to True, ServiceClient will own the sessionn + self.keep_alive = False + + # Potential credentials pre-declared + self.credentials = None + + if filepath: + self.load(filepath) + + @property + def user_agent(self): + # type: () -> str + """The current user agent value.""" + return self.user_agent_policy.user_agent + + def add_user_agent(self, value): + # type: (str) -> None + """Add value to current user agent with a space. + + :param str value: value to add to user agent. + """ + self.user_agent_policy.add_user_agent(value) + + @property + def enable_http_logger(self): + return self.http_logger_policy.enable_http_logger + + @enable_http_logger.setter + def enable_http_logger(self, value): + self.http_logger_policy.enable_http_logger = value diff --git a/.venv/lib/python3.12/site-packages/msrest/exceptions.py b/.venv/lib/python3.12/site-packages/msrest/exceptions.py new file mode 100644 index 00000000..7d6d8dcf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/exceptions.py @@ -0,0 +1,202 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +import logging +import sys + +from typing import Callable, Any, Optional, TYPE_CHECKING +from azure.core.exceptions import SerializationError, DeserializationError + +_LOGGER = logging.getLogger(__name__) + + +def raise_with_traceback(exception, message="", *args, **kwargs): + # type: (Callable, str, Any, Any) -> None + """Raise exception with a specified traceback. + + This MUST be called inside a "except" clause. + + :param Exception exception: Error type to be raised. + :param str message: Message to include with error, empty by default. + :param args: Any additional args to be included with exception. + """ + exc_type, exc_value, exc_traceback = sys.exc_info() + # If not called inside a "except", exc_type will be None. Assume it will not happen + exc_msg = "{}, {}: {}".format(message, exc_type.__name__, exc_value) # type: ignore + error = exception(exc_msg, *args, **kwargs) + try: + raise error.with_traceback(exc_traceback) + except AttributeError: + error.__traceback__ = exc_traceback + raise error + + +class ClientException(Exception): + """Base exception for all Client Runtime exceptions. 
+ + :param str message: Description of exception. + :param Exception inner_exception: Nested exception (optional). + """ + + def __init__(self, message, inner_exception=None, *args, **kwargs): + # type: (str, Any, str, str) -> None + self.inner_exception = inner_exception + _LOGGER.debug(message) + super(ClientException, self).__init__(message, *args, **kwargs) # type: ignore + + +class TokenExpiredError(ClientException): + """OAuth token expired, request failed.""" + pass + + +class ValidationError(ClientException): + """Request parameter validation failed. + + :param str rule: Validation rule. + :param str target: Target value for the rule. + :param str value: Value that was invalid. + """ + + _messages = { + "min_length": "must have length greater than {!r}.", + "max_length": "must have length less than {!r}.", + "minimum_ex": "must be greater than {!r}.", + "maximum_ex": "must be less than {!r}.", + "minimum": "must be equal to or greater than {!r}.", + "maximum": "must be equal to or less than {!r}.", + "min_items": "must contain at least {!r} items.", + "max_items": "must contain at most {!r} items.", + "pattern": "must conform to the following pattern: {!r}.", + "unique": "must contain only unique items.", + "multiple": "must be a multiple of {!r}.", + "required": "can not be None.", + "type": "must be of type {!r}" + } + + @staticmethod + def _format_message(rule, reason, value): + if rule == "type" and value.startswith(r"{"): + internal_type = value.strip(r"{}") + value = "dict[str, {}]".format(internal_type) + return reason.format(value) + + def __init__(self, rule, target, value, *args, **kwargs): + # type: (str, str, str, str, str) -> None + self.rule = rule + self.target = target + message = "Parameter {!r} ".format(target) + reason = self._messages.get( + rule, "failed to meet validation requirement.") + message += self._format_message(rule, reason, value) + super(ValidationError, self).__init__(message, *args, **kwargs) + + +class ClientRequestError(ClientException): + """Client request failed.""" + pass + + +class AuthenticationError(ClientException): + """Client request failed to authenticate.""" + pass + + +# Needed only here for type checking +if TYPE_CHECKING: + import requests + from .serialization import Deserializer + +class HttpOperationError(ClientException): + """Client request failed due to server-specified HTTP operation error. + Attempts to deserialize response into specific error object. + + :param Deserializer deserialize: Deserializer with data on custom + error objects. + :param requests.Response response: Server response + :param str resp_type: Objects type to deserialize response. + :param args: Additional args to pass to exception object. + :ivar Model error: Deserialized error model. 
+ """ + _DEFAULT_MESSAGE = "Unknown error" + + def __str__(self): + # type: () -> str + return str(self.message) + + def __init__(self, deserialize, response, + resp_type=None, *args, **kwargs): + # type: (Deserializer, Any, Optional[str], str, str) -> None + self.error = None + self.message = self._DEFAULT_MESSAGE + if hasattr(response, 'internal_response'): + self.response = response.internal_response + else: + self.response = response + try: + if resp_type: + self.error = deserialize(resp_type, response) + if self.error is None: + self.error = deserialize.dependencies[resp_type]() + # ARM uses OData v4, try that by default + # http://docs.oasis-open.org/odata/odata-json-format/v4.0/os/odata-json-format-v4.0-os.html#_Toc372793091 + # Code and Message are REQUIRED + try: + self.message = "({}) {}".format( + self.error.error.code, + self.error.error.message + ) + except AttributeError: + # Try the default for Autorest if not available (compat) + if self.error.message: + self.message = self.error.message + except (DeserializationError, AttributeError, KeyError): + pass + + if not self.error or self.message == self._DEFAULT_MESSAGE: + try: + response.raise_for_status() + # Two possible raises here: + # - Attribute error if response is not ClientResponse. Do not catch. + # - Any internal exception, take it. + except AttributeError: + raise + except Exception as err: # pylint: disable=broad-except + if not self.error: + self.error = err + + if self.message == self._DEFAULT_MESSAGE: + msg = "Operation returned an invalid status code {!r}" + self.message = msg.format(response.reason) + else: + if not self.error: + self.error = response + + # We can't type hint, but at least we can check that + assert self.message is not None + + super(HttpOperationError, self).__init__( + self.message, self.error, *args, **kwargs) diff --git a/.venv/lib/python3.12/site-packages/msrest/http_logger.py b/.venv/lib/python3.12/site-packages/msrest/http_logger.py new file mode 100644 index 00000000..a246ce41 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/http_logger.py @@ -0,0 +1,105 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+# +# -------------------------------------------------------------------------- + +import logging +import re +import types + +from typing import Any, Optional, TYPE_CHECKING # pylint: disable=unused-import + +if TYPE_CHECKING: + from .universal_http import ClientRequest, ClientResponse # pylint: disable=unused-import + +_LOGGER = logging.getLogger(__name__) + + +def log_request(_, request, *_args, **_kwargs): + # type: (Any, ClientRequest, str, str) -> None + """Log a client request. + + :param _: Unused in current version (will be None) + :param requests.Request request: The request object. + """ + if not _LOGGER.isEnabledFor(logging.DEBUG): + return + + try: + _LOGGER.debug("Request URL: %r", request.url) + _LOGGER.debug("Request method: %r", request.method) + _LOGGER.debug("Request headers:") + for header, value in request.headers.items(): + if header.lower() == 'authorization': + value = '*****' + _LOGGER.debug(" %r: %r", header, value) + _LOGGER.debug("Request body:") + + # We don't want to log the binary data of a file upload. + if isinstance(request.body, types.GeneratorType): + _LOGGER.debug("File upload") + else: + _LOGGER.debug(str(request.body)) + except Exception as err: # pylint: disable=broad-except + _LOGGER.debug("Failed to log request: %r", err) + + +def log_response(_, _request, response, *_args, **kwargs): + # type: (Any, ClientRequest, ClientResponse, str, Any) -> Optional[ClientResponse] + """Log a server response. + + :param _: Unused in current version (will be None) + :param requests.Request request: The request object. + :param requests.Response response: The response object. + """ + if not _LOGGER.isEnabledFor(logging.DEBUG): + return None + + try: + _LOGGER.debug("Response status: %r", response.status_code) + _LOGGER.debug("Response headers:") + for res_header, value in response.headers.items(): + _LOGGER.debug(" %r: %r", res_header, value) + + # We don't want to log binary data if the response is a file. + _LOGGER.debug("Response content:") + pattern = re.compile(r'attachment; ?filename=["\w.]+', re.IGNORECASE) + header = response.headers.get('content-disposition') + + if header and pattern.match(header): + filename = header.partition('=')[2] + _LOGGER.debug("File attachments: %s", filename) + elif response.headers.get("content-type", "").endswith("octet-stream"): + _LOGGER.debug("Body contains binary data.") + elif response.headers.get("content-type", "").startswith("image"): + _LOGGER.debug("Body contains image data.") + else: + if kwargs.get('stream', False): + _LOGGER.debug("Body is streamable") + else: + _LOGGER.debug(response.text()) + return response + except Exception as err: # pylint: disable=broad-except + _LOGGER.debug("Failed to log response: %s", repr(err)) + return response diff --git a/.venv/lib/python3.12/site-packages/msrest/paging.py b/.venv/lib/python3.12/site-packages/msrest/paging.py new file mode 100644 index 00000000..c654ba34 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/paging.py @@ -0,0 +1,146 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +import sys +try: + from collections.abc import Iterator + xrange = range +except ImportError: + from collections import Iterator + +from typing import Dict, Any, List, Callable, Optional, TYPE_CHECKING # pylint: disable=unused-import + +from .serialization import Deserializer +from .pipeline import ClientRawResponse + +if TYPE_CHECKING: + from .universal_http import ClientResponse # pylint: disable=unused-import + from .serialization import Model # pylint: disable=unused-import + +if sys.version_info >= (3, 5, 2): + # Not executed on old Python, no syntax error + from .async_paging import AsyncPagedMixin # type: ignore +else: + class AsyncPagedMixin(object): # type: ignore + pass + +class Paged(AsyncPagedMixin, Iterator): + """A container for paged REST responses. + + :param ClientResponse response: server response object. + :param callable command: Function to retrieve the next page of items. + :param dict classes: A dictionary of class dependencies for + deserialization. + :param dict raw_headers: A dict of raw headers to add if "raw" is called + """ + _validation = {} # type: Dict[str, Dict[str, Any]] + _attribute_map = {} # type: Dict[str, Dict[str, Any]] + + def __init__(self, command, classes, raw_headers=None, **kwargs): + # type: (Callable[[str], ClientResponse], Dict[str, Model], Dict[str, str], Any) -> None + super(Paged, self).__init__(**kwargs) # type: ignore + # Sets next_link, current_page, and _current_page_iter_index. + self.next_link = "" + self._current_page_iter_index = 0 + self.reset() + self._derserializer = Deserializer(classes) + self._get_next = command + self._response = None # type: Optional[ClientResponse] + self._raw_headers = raw_headers + + def __iter__(self): + """Return 'self'.""" + # Since iteration mutates this object, consider it an iterator in-and-of + # itself. + return self + + @classmethod + def _get_subtype_map(cls): + """Required for parity to Model object for deserialization.""" + return {} + + @property + def raw(self): + # type: () -> ClientRawResponse + """Get current page as ClientRawResponse. + + :rtype: ClientRawResponse + """ + raw = ClientRawResponse(self.current_page, self._response) + if self._raw_headers: + raw.add_headers(self._raw_headers) + return raw + + def get(self, url): + # type: (str) -> List[Model] + """Get an arbitrary page. 
+ + This resets the iterator and then fully consumes it to return the + specific page **only**. + + :param str url: URL to arbitrary page results. + """ + self.reset() + self.next_link = url + return self.advance_page() + + def reset(self): + # type: () -> None + """Reset iterator to first page.""" + self.next_link = "" + self.current_page = [] # type: List[Model] + self._current_page_iter_index = 0 + + def advance_page(self): + # type: () -> List[Model] + """Force moving the cursor to the next azure call. + + This method is for advanced usage, iterator protocol is preferred. + + :raises: StopIteration if no further page + :return: The current page list + :rtype: list + """ + if self.next_link is None: + raise StopIteration("End of paging") + self._current_page_iter_index = 0 + self._response = self._get_next(self.next_link) + self._derserializer(self, self._response) + return self.current_page + + def __next__(self): + """Iterate through responses.""" + # Storing the list iterator might work out better, but there's no + # guarantee that some code won't replace the list entirely with a copy, + # invalidating an list iterator that might be saved between iterations. + if self.current_page and self._current_page_iter_index < len(self.current_page): + response = self.current_page[self._current_page_iter_index] + self._current_page_iter_index += 1 + return response + else: + self.advance_page() + return self.__next__() + + next = __next__ # Python 2 compatibility. diff --git a/.venv/lib/python3.12/site-packages/msrest/pipeline/__init__.py b/.venv/lib/python3.12/site-packages/msrest/pipeline/__init__.py new file mode 100644 index 00000000..428cf8cf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/pipeline/__init__.py @@ -0,0 +1,329 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+# +# -------------------------------------------------------------------------- +from __future__ import absolute_import # we have a "requests" module that conflicts with "requests" on Py2.7 +import abc +try: + import configparser + from configparser import NoOptionError +except ImportError: + import ConfigParser as configparser # type: ignore + from ConfigParser import NoOptionError # type: ignore +import json +import logging +import os.path +try: + from urlparse import urlparse +except ImportError: + from urllib.parse import urlparse +import xml.etree.ElementTree as ET + +from typing import TYPE_CHECKING, Generic, TypeVar, cast, IO, List, Union, Any, Mapping, Dict, Optional, Tuple, Callable, Iterator # pylint: disable=unused-import + +HTTPResponseType = TypeVar("HTTPResponseType") +HTTPRequestType = TypeVar("HTTPRequestType") + +# This file is NOT using any "requests" HTTP implementation +# However, the CaseInsensitiveDict is handy. +# If one day we reach the point where "requests" can be skip totally, +# might provide our own implementation +from requests.structures import CaseInsensitiveDict + + +_LOGGER = logging.getLogger(__name__) + +try: + ABC = abc.ABC +except AttributeError: # Python 2.7, abc exists, but not ABC + ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()}) # type: ignore + +try: + from contextlib import AbstractContextManager # type: ignore +except ImportError: # Python <= 3.5 + class AbstractContextManager(object): # type: ignore + def __enter__(self): + """Return `self` upon entering the runtime context.""" + return self + + @abc.abstractmethod + def __exit__(self, exc_type, exc_value, traceback): + """Raise any exception triggered within the runtime context.""" + return None + +class HTTPPolicy(ABC, Generic[HTTPRequestType, HTTPResponseType]): + """An http policy ABC. + """ + def __init__(self): + self.next = None + + @abc.abstractmethod + def send(self, request, **kwargs): + # type: (Request[HTTPRequestType], Any) -> Response[HTTPRequestType, HTTPResponseType] + """Mutate the request. + + Context content is dependent of the HTTPSender. + """ + pass + +class SansIOHTTPPolicy(Generic[HTTPRequestType, HTTPResponseType]): + """Represents a sans I/O policy. + + This policy can act before the I/O, and after the I/O. + Use this policy if the actual I/O in the middle is an implementation + detail. + + Context is not available, since it's implementation dependent. + if a policy needs a context of the Sender, it can't be universal. + + Example: setting a UserAgent does not need to be tight to + sync or async implementation or specific HTTP lib + """ + def on_request(self, request, **kwargs): + # type: (Request[HTTPRequestType], Any) -> None + """Is executed before sending the request to next policy. + """ + pass + + def on_response(self, request, response, **kwargs): + # type: (Request[HTTPRequestType], Response[HTTPRequestType, HTTPResponseType], Any) -> None + """Is executed after the request comes back from the policy. + """ + pass + + def on_exception(self, request, **kwargs): + # type: (Request[HTTPRequestType], Any) -> bool + """Is executed if an exception comes back from the following + policy. + + Return True if the exception has been handled and should not + be forwarded to the caller. + + This method is executed inside the exception handler. 
+ To get the exception, raise and catch it: + + try: + raise + except MyError: + do_something() + + or use + + exc_type, exc_value, exc_traceback = sys.exc_info() + """ + return False + +class _SansIOHTTPPolicyRunner(HTTPPolicy, Generic[HTTPRequestType, HTTPResponseType]): + """Sync implementation of the SansIO policy. + """ + + def __init__(self, policy): + # type: (SansIOHTTPPolicy) -> None + super(_SansIOHTTPPolicyRunner, self).__init__() + self._policy = policy + + def send(self, request, **kwargs): + # type: (Request[HTTPRequestType], Any) -> Response[HTTPRequestType, HTTPResponseType] + self._policy.on_request(request, **kwargs) + try: + response = self.next.send(request, **kwargs) + except Exception: + if not self._policy.on_exception(request, **kwargs): + raise + else: + self._policy.on_response(request, response, **kwargs) + return response + +class Pipeline(AbstractContextManager, Generic[HTTPRequestType, HTTPResponseType]): + """A pipeline implementation. + + This is implemented as a context manager, that will activate the context + of the HTTP sender. + """ + + def __init__(self, policies=None, sender=None): + # type: (List[Union[HTTPPolicy, SansIOHTTPPolicy]], HTTPSender) -> None + self._impl_policies = [] # type: List[HTTPPolicy] + if not sender: + # Import default only if nothing is provided + from .requests import PipelineRequestsHTTPSender + self._sender = cast(HTTPSender, PipelineRequestsHTTPSender()) + else: + self._sender = sender + for policy in (policies or []): + if isinstance(policy, SansIOHTTPPolicy): + self._impl_policies.append(_SansIOHTTPPolicyRunner(policy)) + else: + self._impl_policies.append(policy) + for index in range(len(self._impl_policies)-1): + self._impl_policies[index].next = self._impl_policies[index+1] + if self._impl_policies: + self._impl_policies[-1].next = self._sender + + def __enter__(self): + # type: () -> Pipeline + self._sender.__enter__() + return self + + def __exit__(self, *exc_details): # pylint: disable=arguments-differ + self._sender.__exit__(*exc_details) + + def run(self, request, **kwargs): + # type: (HTTPRequestType, Any) -> Response + context = self._sender.build_context() + pipeline_request = Request(request, context) # type: Request[HTTPRequestType] + first_node = self._impl_policies[0] if self._impl_policies else self._sender + return first_node.send(pipeline_request, **kwargs) # type: ignore + +class HTTPSender(AbstractContextManager, ABC, Generic[HTTPRequestType, HTTPResponseType]): + """An http sender ABC. + """ + + @abc.abstractmethod + def send(self, request, **config): + # type: (Request[HTTPRequestType], Any) -> Response[HTTPRequestType, HTTPResponseType] + """Send the request using this HTTP sender. + """ + pass + + def build_context(self): + # type: () -> Any + """Allow the sender to build a context that will be passed + across the pipeline with the request. + + Return type has no constraints. Implementation is not + required and None by default. + """ + return None + + +class Request(Generic[HTTPRequestType]): + """Represents a HTTP request in a Pipeline. + + URL can be given without query parameters, to be added later using "format_parameters". + + Instance can be created without data, to be added later using "add_content" + + Instance can be created without files, to be added later using "add_formdata" + + :param str method: HTTP method (GET, HEAD, etc.) + :param str url: At least complete scheme/host/path + :param dict[str,str] headers: HTTP headers + :param files: Files list. + :param data: Body to be sent. 
+ :type data: bytes or str. + """ + def __init__(self, http_request, context=None): + # type: (HTTPRequestType, Optional[Any]) -> None + self.http_request = http_request + self.context = context + + +class Response(Generic[HTTPRequestType, HTTPResponseType]): + """A pipeline response object. + + The Response interface exposes an HTTP response object as it returns through the pipeline of Policy objects. + This ensures that Policy objects have access to the HTTP response. + + This also have a "context" dictionary where policy can put additional fields. + Policy SHOULD update the "context" dictionary with additional post-processed field if they create them. + However, nothing prevents a policy to actually sub-class this class a return it instead of the initial instance. + """ + def __init__(self, request, http_response, context=None): + # type: (Request[HTTPRequestType], HTTPResponseType, Optional[Dict[str, Any]]) -> None + self.request = request + self.http_response = http_response + self.context = context or {} + + +# ClientRawResponse is in Pipeline for compat, but technically there is nothing Pipeline here, this is deserialization + +if TYPE_CHECKING: + from ..universal_http import ClientResponse + +class ClientRawResponse(object): + """Wrapper for response object. + This allows for additional data to be gathereded from the response, + for example deserialized headers. + It also allows the raw response object to be passed back to the user. + + :param output: Deserialized response object. This is the type that would have been returned + directly by the main operation without raw=True. + :param response: Raw response object (by default requests.Response instance) + :type response: ~requests.Response + """ + + def __init__(self, output, response): + # type: (Union[Any], Optional[Union[Response, ClientResponse]]) -> None + from ..serialization import Deserializer + + if isinstance(response, Response): + # If pipeline response, remove that layer + response = response.http_response + + try: + # If universal driver, remove that layer + self.response = response.internal_response # type: ignore + except AttributeError: + self.response = response + + self.output = output + self.headers = {} # type: Dict[str, Optional[Any]] + self._deserialize = Deserializer() + + def add_headers(self, header_dict): + # type: (Dict[str, str]) -> None + """Deserialize a specific header. + + :param dict header_dict: A dictionary containing the name of the + header and the type to deserialize to. 
+ """ + if not self.response: + return + for name, data_type in header_dict.items(): + value = self.response.headers.get(name) + value = self._deserialize(data_type, value) + self.headers[name] = value + + + +__all__ = [ + 'Request', + 'Response', + 'Pipeline', + 'HTTPPolicy', + 'SansIOHTTPPolicy', + 'HTTPSender', + # backward compat + 'ClientRawResponse', +] + +try: + from .async_abc import AsyncPipeline, AsyncHTTPPolicy, AsyncHTTPSender # pylint: disable=unused-import + from .async_abc import __all__ as _async_all + __all__ += _async_all +except SyntaxError: # Python 2 + pass +except ImportError: # pyinstaller won't include Py3 files in Py2.7 mode + pass diff --git a/.venv/lib/python3.12/site-packages/msrest/pipeline/aiohttp.py b/.venv/lib/python3.12/site-packages/msrest/pipeline/aiohttp.py new file mode 100644 index 00000000..cefb1a5a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/pipeline/aiohttp.py @@ -0,0 +1,62 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +from typing import Any, Optional + +from ..universal_http.aiohttp import AioHTTPSender as _AioHTTPSenderDriver +from . import AsyncHTTPSender, Request, Response + +# Matching requests, because why not? +CONTENT_CHUNK_SIZE = 10 * 1024 + +class AioHTTPSender(AsyncHTTPSender): + """AioHttp HTTP sender implementation. + """ + + def __init__(self, driver: Optional[_AioHTTPSenderDriver] = None, *, loop=None) -> None: + self.driver = driver or _AioHTTPSenderDriver(loop=loop) + + async def __aenter__(self): + await self.driver.__aenter__() + + async def __aexit__(self, *exc_details): # pylint: disable=arguments-differ + await self.driver.__aexit__(*exc_details) + + def build_context(self) -> Any: + """Allow the sender to build a context that will be passed + across the pipeline with the request. + + Return type has no constraints. Implementation is not + required and None by default. + """ + return None + + async def send(self, request: Request, **config: Any) -> Response: + """Send the request using this HTTP sender. 
+ """ + return Response( + request, + await self.driver.send(request.http_request) + ) diff --git a/.venv/lib/python3.12/site-packages/msrest/pipeline/async_abc.py b/.venv/lib/python3.12/site-packages/msrest/pipeline/async_abc.py new file mode 100644 index 00000000..3fa73dda --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/pipeline/async_abc.py @@ -0,0 +1,165 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +import abc + +from typing import Any, List, Union, Callable, AsyncIterator, Optional, Generic, TypeVar + +from . import Request, Response, Pipeline, SansIOHTTPPolicy + + +AsyncHTTPResponseType = TypeVar("AsyncHTTPResponseType") +HTTPRequestType = TypeVar("HTTPRequestType") + +try: + from contextlib import AbstractAsyncContextManager # type: ignore +except ImportError: # Python <= 3.7 + class AbstractAsyncContextManager(object): # type: ignore + async def __aenter__(self): + """Return `self` upon entering the runtime context.""" + return self + + @abc.abstractmethod + async def __aexit__(self, exc_type, exc_value, traceback): + """Raise any exception triggered within the runtime context.""" + return None + + + + +class AsyncHTTPPolicy(abc.ABC, Generic[HTTPRequestType, AsyncHTTPResponseType]): + """An http policy ABC. + """ + def __init__(self) -> None: + # next will be set once in the pipeline + self.next = None # type: Optional[Union[AsyncHTTPPolicy[HTTPRequestType, AsyncHTTPResponseType], AsyncHTTPSender[HTTPRequestType, AsyncHTTPResponseType]]] + + @abc.abstractmethod + async def send(self, request: Request, **kwargs: Any) -> Response[HTTPRequestType, AsyncHTTPResponseType]: + """Mutate the request. + + Context content is dependent of the HTTPSender. + """ + pass + + +class _SansIOAsyncHTTPPolicyRunner(AsyncHTTPPolicy[HTTPRequestType, AsyncHTTPResponseType]): + """Async implementation of the SansIO policy. 
+ """ + + def __init__(self, policy: SansIOHTTPPolicy) -> None: + super(_SansIOAsyncHTTPPolicyRunner, self).__init__() + self._policy = policy + + async def send(self, request: Request, **kwargs: Any) -> Response[HTTPRequestType, AsyncHTTPResponseType]: + self._policy.on_request(request, **kwargs) + try: + response = await self.next.send(request, **kwargs) # type: ignore + except Exception: + if not self._policy.on_exception(request, **kwargs): + raise + else: + self._policy.on_response(request, response, **kwargs) + return response + + +class AsyncHTTPSender(AbstractAsyncContextManager, abc.ABC, Generic[HTTPRequestType, AsyncHTTPResponseType]): + """An http sender ABC. + """ + + @abc.abstractmethod + async def send(self, request: Request[HTTPRequestType], **config: Any) -> Response[HTTPRequestType, AsyncHTTPResponseType]: + """Send the request using this HTTP sender. + """ + pass + + def build_context(self) -> Any: + """Allow the sender to build a context that will be passed + across the pipeline with the request. + + Return type has no constraints. Implementation is not + required and None by default. + """ + return None + + def __enter__(self): + raise TypeError("Use async with instead") + + def __exit__(self, exc_type, exc_val, exc_tb): + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + +class AsyncPipeline(AbstractAsyncContextManager, Generic[HTTPRequestType, AsyncHTTPResponseType]): + """A pipeline implementation. + + This is implemented as a context manager, that will activate the context + of the HTTP sender. + """ + + def __init__(self, policies: List[Union[AsyncHTTPPolicy, SansIOHTTPPolicy]] = None, sender: Optional[AsyncHTTPSender[HTTPRequestType, AsyncHTTPResponseType]] = None) -> None: + self._impl_policies = [] # type: List[AsyncHTTPPolicy[HTTPRequestType, AsyncHTTPResponseType]] + if sender: + self._sender = sender + else: + # Import default only if nothing is provided + from .aiohttp import AioHTTPSender + self._sender = AioHTTPSender() + + for policy in (policies or []): + if isinstance(policy, SansIOHTTPPolicy): + self._impl_policies.append(_SansIOAsyncHTTPPolicyRunner(policy)) + else: + self._impl_policies.append(policy) + for index in range(len(self._impl_policies)-1): + self._impl_policies[index].next = self._impl_policies[index+1] + if self._impl_policies: + self._impl_policies[-1].next = self._sender + + def __enter__(self): + raise TypeError("Use 'async with' instead") + + def __exit__(self, exc_type, exc_val, exc_tb): + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + async def __aenter__(self) -> 'AsyncPipeline': + await self._sender.__aenter__() + return self + + async def __aexit__(self, *exc_details): # pylint: disable=arguments-differ + await self._sender.__aexit__(*exc_details) + + async def run(self, request: Request, **kwargs: Any) -> Response[HTTPRequestType, AsyncHTTPResponseType]: + context = self._sender.build_context() + pipeline_request = Request(request, context) + first_node = self._impl_policies[0] if self._impl_policies else self._sender + return await first_node.send(pipeline_request, **kwargs) # type: ignore + +__all__ = [ + 'AsyncHTTPPolicy', + 'AsyncHTTPSender', + 'AsyncPipeline', +]
\ No newline at end of file diff --git a/.venv/lib/python3.12/site-packages/msrest/pipeline/async_requests.py b/.venv/lib/python3.12/site-packages/msrest/pipeline/async_requests.py new file mode 100644 index 00000000..d5d9ec2f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/pipeline/async_requests.py @@ -0,0 +1,129 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +import asyncio +from collections.abc import AsyncIterator +import functools +import logging +from typing import Any, Callable, Optional, AsyncIterator as AsyncIteratorType + +from oauthlib import oauth2 +import requests +from requests.models import CONTENT_CHUNK_SIZE + +from ..exceptions import ( + TokenExpiredError, + ClientRequestError, + raise_with_traceback +) +from ..universal_http.async_requests import AsyncBasicRequestsHTTPSender +from . import AsyncHTTPSender, AsyncHTTPPolicy, Response, Request +from .requests import RequestsContext + + +_LOGGER = logging.getLogger(__name__) + + +class AsyncPipelineRequestsHTTPSender(AsyncHTTPSender): + """Implements a basic Pipeline, that supports universal HTTP lib "requests" driver. + """ + + def __init__(self, universal_http_requests_driver: Optional[AsyncBasicRequestsHTTPSender]=None) -> None: + self.driver = universal_http_requests_driver or AsyncBasicRequestsHTTPSender() + + async def __aenter__(self) -> 'AsyncPipelineRequestsHTTPSender': + await self.driver.__aenter__() + return self + + async def __aexit__(self, *exc_details): # pylint: disable=arguments-differ + await self.driver.__aexit__(*exc_details) + + async def close(self): + await self.__aexit__() + + def build_context(self): + # type: () -> RequestsContext + return RequestsContext( + session=self.driver.session, + ) + + async def send(self, request: Request, **kwargs) -> Response: + """Send request object according to configuration. + + :param Request request: The request object to be sent. 
+ """ + if request.context is None: # Should not happen, but make mypy happy and does not hurt + request.context = self.build_context() + + if request.context.session is not self.driver.session: + kwargs['session'] = request.context.session + + return Response( + request, + await self.driver.send(request.http_request, **kwargs) + ) + + +class AsyncRequestsCredentialsPolicy(AsyncHTTPPolicy): + """Implementation of request-oauthlib except and retry logic. + """ + def __init__(self, credentials): + super(AsyncRequestsCredentialsPolicy, self).__init__() + self._creds = credentials + + async def send(self, request, **kwargs): + session = request.context.session + try: + self._creds.signed_session(session) + except TypeError: # Credentials does not support session injection + _LOGGER.warning("Your credentials class does not support session injection. Performance will not be at the maximum.") + request.context.session = session = self._creds.signed_session() + + try: + try: + return await self.next.send(request, **kwargs) + except (oauth2.rfc6749.errors.InvalidGrantError, + oauth2.rfc6749.errors.TokenExpiredError) as err: + error = "Token expired or is invalid. Attempting to refresh." + _LOGGER.warning(error) + + try: + try: + self._creds.refresh_session(session) + except TypeError: # Credentials does not support session injection + _LOGGER.warning("Your credentials class does not support session injection. Performance will not be at the maximum.") + request.context.session = session = self._creds.refresh_session() + + return await self.next.send(request, **kwargs) + except (oauth2.rfc6749.errors.InvalidGrantError, + oauth2.rfc6749.errors.TokenExpiredError) as err: + msg = "Token expired or is invalid." + raise_with_traceback(TokenExpiredError, msg, err) + + except (requests.RequestException, + oauth2.rfc6749.errors.OAuth2Error) as err: + msg = "Error occurred in request." + raise_with_traceback(ClientRequestError, msg, err) + diff --git a/.venv/lib/python3.12/site-packages/msrest/pipeline/requests.py b/.venv/lib/python3.12/site-packages/msrest/pipeline/requests.py new file mode 100644 index 00000000..2dac5a3b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/pipeline/requests.py @@ -0,0 +1,194 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+# +# -------------------------------------------------------------------------- +""" +This module is the requests implementation of Pipeline ABC +""" +from __future__ import absolute_import # we have a "requests" module that conflicts with "requests" on Py2.7 +import contextlib +import logging +import threading +from typing import TYPE_CHECKING, List, Callable, Iterator, Any, Union, Dict, Optional # pylint: disable=unused-import +import warnings + +from oauthlib import oauth2 +import requests +from requests.models import CONTENT_CHUNK_SIZE + +from urllib3 import Retry # Needs requests 2.16 at least to be safe + +from ..exceptions import ( + TokenExpiredError, + ClientRequestError, + raise_with_traceback +) +from ..universal_http import ClientRequest +from ..universal_http.requests import BasicRequestsHTTPSender +from . import HTTPSender, HTTPPolicy, Response, Request + + +_LOGGER = logging.getLogger(__name__) + + +class RequestsCredentialsPolicy(HTTPPolicy): + """Implementation of request-oauthlib except and retry logic. + """ + def __init__(self, credentials): + super(RequestsCredentialsPolicy, self).__init__() + self._creds = credentials + + def send(self, request, **kwargs): + session = request.context.session + try: + self._creds.signed_session(session) + except TypeError: # Credentials does not support session injection + _LOGGER.warning("Your credentials class does not support session injection. Performance will not be at the maximum.") + request.context.session = session = self._creds.signed_session() + + try: + try: + return self.next.send(request, **kwargs) + except (oauth2.rfc6749.errors.InvalidGrantError, + oauth2.rfc6749.errors.TokenExpiredError) as err: + error = "Token expired or is invalid. Attempting to refresh." + _LOGGER.warning(error) + + try: + try: + self._creds.refresh_session(session) + except TypeError: # Credentials does not support session injection + _LOGGER.warning("Your credentials class does not support session injection. Performance will not be at the maximum.") + request.context.session = session = self._creds.refresh_session() + + return self.next.send(request, **kwargs) + except (oauth2.rfc6749.errors.InvalidGrantError, + oauth2.rfc6749.errors.TokenExpiredError) as err: + msg = "Token expired or is invalid." + raise_with_traceback(TokenExpiredError, msg, err) + + except (requests.RequestException, + oauth2.rfc6749.errors.OAuth2Error) as err: + msg = "Error occurred in request." + raise_with_traceback(ClientRequestError, msg, err) + +class RequestsPatchSession(HTTPPolicy): + """Implements request level configuration + that are actually to be done at the session level. + + This is highly deprecated, and is totally legacy. + The pipeline structure allows way better design for this. + """ + _protocols = ['http://', 'https://'] + + def send(self, request, **kwargs): + """Patch the current session with Request level operation config. + + This is deprecated, we shouldn't patch the session with + arguments at the Request, and "config" should be used. 
+ """ + session = request.context.session + + old_max_redirects = None + if 'max_redirects' in kwargs: + warnings.warn("max_redirects in operation kwargs is deprecated, use config.redirect_policy instead", + DeprecationWarning) + old_max_redirects = session.max_redirects + session.max_redirects = int(kwargs['max_redirects']) + + old_trust_env = None + if 'use_env_proxies' in kwargs: + warnings.warn("use_env_proxies in operation kwargs is deprecated, use config.proxies instead", + DeprecationWarning) + old_trust_env = session.trust_env + session.trust_env = bool(kwargs['use_env_proxies']) + + old_retries = {} + if 'retries' in kwargs: + warnings.warn("retries in operation kwargs is deprecated, use config.retry_policy instead", + DeprecationWarning) + max_retries = kwargs['retries'] + for protocol in self._protocols: + old_retries[protocol] = session.adapters[protocol].max_retries + session.adapters[protocol].max_retries = max_retries + + try: + return self.next.send(request, **kwargs) + finally: + if old_max_redirects: + session.max_redirects = old_max_redirects + + if old_trust_env: + session.trust_env = old_trust_env + + if old_retries: + for protocol in self._protocols: + session.adapters[protocol].max_retries = old_retries[protocol] + +class RequestsContext(object): + def __init__(self, session): + self.session = session + + +class PipelineRequestsHTTPSender(HTTPSender): + """Implements a basic Pipeline, that supports universal HTTP lib "requests" driver. + """ + + def __init__(self, universal_http_requests_driver=None): + # type: (Optional[BasicRequestsHTTPSender]) -> None + self.driver = universal_http_requests_driver or BasicRequestsHTTPSender() + + def __enter__(self): + # type: () -> PipelineRequestsHTTPSender + self.driver.__enter__() + return self + + def __exit__(self, *exc_details): # pylint: disable=arguments-differ + self.driver.__exit__(*exc_details) + + def close(self): + self.__exit__() + + def build_context(self): + # type: () -> RequestsContext + return RequestsContext( + session=self.driver.session, + ) + + def send(self, request, **kwargs): + # type: (Request[ClientRequest], Any) -> Response + """Send request object according to configuration. + + :param Request request: The request object to be sent. + """ + if request.context is None: # Should not happen, but make mypy happy and does not hurt + request.context = self.build_context() + + if request.context.session is not self.driver.session: + kwargs['session'] = request.context.session + + return Response( + request, + self.driver.send(request.http_request, **kwargs) + ) diff --git a/.venv/lib/python3.12/site-packages/msrest/pipeline/universal.py b/.venv/lib/python3.12/site-packages/msrest/pipeline/universal.py new file mode 100644 index 00000000..b8dc40c9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/pipeline/universal.py @@ -0,0 +1,253 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +""" +This module represents universal policy that works whatever the HTTPSender implementation +""" +import json +import logging +import os +import xml.etree.ElementTree as ET +import platform +import codecs +import re + +from typing import Mapping, Any, Optional, AnyStr, Union, IO, cast, TYPE_CHECKING # pylint: disable=unused-import + +from ..version import msrest_version as _msrest_version +from . import SansIOHTTPPolicy +from ..exceptions import DeserializationError, raise_with_traceback +from ..http_logger import log_request, log_response + +if TYPE_CHECKING: + from . import Request, Response # pylint: disable=unused-import + + +_LOGGER = logging.getLogger(__name__) + +_BOM = codecs.BOM_UTF8.decode(encoding='utf-8') + + +class HeadersPolicy(SansIOHTTPPolicy): + """A simple policy that sends the given headers + with the request. + + This overwrite any headers already defined in the request. + """ + def __init__(self, headers): + # type: (Mapping[str, str]) -> None + self.headers = headers + + def on_request(self, request, **kwargs): + # type: (Request, Any) -> None + http_request = request.http_request + http_request.headers.update(self.headers) + +class UserAgentPolicy(SansIOHTTPPolicy): + _USERAGENT = "User-Agent" + _ENV_ADDITIONAL_USER_AGENT = 'AZURE_HTTP_USER_AGENT' + + def __init__(self, user_agent=None, overwrite=False): + # type: (Optional[str], bool) -> None + self._overwrite = overwrite + if user_agent is None: + self._user_agent = "python/{} ({}) msrest/{}".format( + platform.python_version(), + platform.platform(), + _msrest_version + ) + else: + self._user_agent = user_agent + + # Whatever you gave me a header explicitly or not, + # if the env variable is set, add to it. + add_user_agent_header = os.environ.get(self._ENV_ADDITIONAL_USER_AGENT, None) + if add_user_agent_header is not None: + self.add_user_agent(add_user_agent_header) + + @property + def user_agent(self): + # type: () -> str + """The current user agent value.""" + return self._user_agent + + def add_user_agent(self, value): + # type: (str) -> None + """Add value to current user agent with a space. + + :param str value: value to add to user agent. 
+ """ + self._user_agent = "{} {}".format(self._user_agent, value) + + def on_request(self, request, **kwargs): + # type: (Request, Any) -> None + http_request = request.http_request + if self._overwrite or self._USERAGENT not in http_request.headers: + http_request.headers[self._USERAGENT] = self._user_agent + +class HTTPLogger(SansIOHTTPPolicy): + """A policy that logs HTTP request and response to the DEBUG logger. + + This accepts both global configuration, and kwargs request level with "enable_http_logger" + """ + def __init__(self, enable_http_logger = False): + self.enable_http_logger = enable_http_logger + + def on_request(self, request, **kwargs): + # type: (Request, Any) -> None + http_request = request.http_request + if kwargs.get("enable_http_logger", self.enable_http_logger): + log_request(None, http_request) + + def on_response(self, request, response, **kwargs): + # type: (Request, Response, Any) -> None + http_request = request.http_request + if kwargs.get("enable_http_logger", self.enable_http_logger): + log_response(None, http_request, response.http_response, result=response) + + +class RawDeserializer(SansIOHTTPPolicy): + + # Accept "text" because we're open minded people... + JSON_REGEXP = re.compile(r'^(application|text)/([a-z+.]+\+)?json$') + + # Name used in context + CONTEXT_NAME = "deserialized_data" + + @classmethod + def deserialize_from_text(cls, data, content_type=None): + # type: (Optional[Union[AnyStr, IO]], Optional[str]) -> Any + """Decode data according to content-type. + + Accept a stream of data as well, but will be load at once in memory for now. + + If no content-type, will return the string version (not bytes, not stream) + + :param data: Input, could be bytes or stream (will be decoded with UTF8) or text + :type data: str or bytes or IO + :param str content_type: The content type. + """ + if hasattr(data, 'read'): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding='utf-8-sig') + else: + # Explain to mypy the correct type. + data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError("JSON is invalid: {}".format(err), err) + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) + except ET.ParseError: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... + # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attemp(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + success, json_result = _json_attemp(data) + if success: + return json_result + # If i'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise. 
+ _LOGGER.critical("Wasn't XML not JSON, failing") + raise_with_traceback(DeserializationError, "XML is invalid") + raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) + + @classmethod + def deserialize_from_http_generics(cls, body_bytes, headers): + # type: (Optional[Union[AnyStr, IO]], Mapping) -> Any + """Deserialize from HTTP response. + + Use bytes and headers to NOT use any requests/aiohttp or whatever + specific implementation. + Headers will tested for "content-type" + """ + # Try to use content-type from headers if available + content_type = None + if 'content-type' in headers: + content_type = headers['content-type'].split(";")[0].strip().lower() + # Ouch, this server did not declare what it sent... + # Let's guess it's JSON... + # Also, since Autorest was considering that an empty body was a valid JSON, + # need that test as well.... + else: + content_type = "application/json" + + if body_bytes: + return cls.deserialize_from_text(body_bytes, content_type) + return None + + def on_response(self, request, response, **kwargs): + # type: (Request, Response, Any) -> None + """Extract data from the body of a REST response object. + + This will load the entire payload in memory. + + Will follow Content-Type to parse. + We assume everything is UTF8 (BOM acceptable). + + :param raw_data: Data to be processed. + :param content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + :raises xml.etree.ElementTree.ParseError: If bytes is not valid XML + """ + # If response was asked as stream, do NOT read anything and quit now + if kwargs.get("stream", True): + return + + http_response = response.http_response + + response.context[self.CONTEXT_NAME] = self.deserialize_from_http_generics( + http_response.text(), + http_response.headers + ) diff --git a/.venv/lib/python3.12/site-packages/msrest/polling/__init__.py b/.venv/lib/python3.12/site-packages/msrest/polling/__init__.py new file mode 100644 index 00000000..bc79e476 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/polling/__init__.py @@ -0,0 +1,34 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+# +# -------------------------------------------------------------------------- +import sys + +from .poller import LROPoller, NoPolling, PollingMethod +__all__ = ['LROPoller', 'NoPolling', 'PollingMethod'] + +if sys.version_info >= (3, 5, 2): + # Not executed on old Python, no syntax error + from .async_poller import AsyncNoPolling, AsyncPollingMethod, async_poller + __all__ += ['AsyncNoPolling', 'AsyncPollingMethod', 'async_poller'] diff --git a/.venv/lib/python3.12/site-packages/msrest/polling/async_poller.py b/.venv/lib/python3.12/site-packages/msrest/polling/async_poller.py new file mode 100644 index 00000000..63c20b48 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/polling/async_poller.py @@ -0,0 +1,88 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +from .poller import NoPolling as _NoPolling + +from ..serialization import Model +from ..async_client import ServiceClientAsync +from ..pipeline import ClientRawResponse + +class AsyncPollingMethod(object): + """ABC class for polling method. + """ + def initialize(self, client, initial_response, deserialization_callback): + raise NotImplementedError("This method needs to be implemented") + + async def run(self): + raise NotImplementedError("This method needs to be implemented") + + def status(self): + raise NotImplementedError("This method needs to be implemented") + + def finished(self): + raise NotImplementedError("This method needs to be implemented") + + def resource(self): + raise NotImplementedError("This method needs to be implemented") + + +class AsyncNoPolling(_NoPolling): + """An empty async poller that returns the deserialized initial response. + """ + async def run(self): + """Empty run, no polling. + + Just override initial run to add "async" + """ + pass + + +async def async_poller(client, initial_response, deserialization_callback, polling_method): + """Async Poller for long running operations. + + :param client: A msrest service client. Can be a SDK client and it will be casted to a ServiceClient. 
+ :type client: msrest.service_client.ServiceClient + :param initial_response: The initial call response + :type initial_response: msrest.universal_http.ClientResponse or msrest.pipeline.ClientRawResponse + :param deserialization_callback: A callback that takes a Response and return a deserialized object. If a subclass of Model is given, this passes "deserialize" as callback. + :type deserialization_callback: callable or msrest.serialization.Model + :param polling_method: The polling strategy to adopt + :type polling_method: msrest.polling.PollingMethod + """ + + try: + client = client if isinstance(client, ServiceClientAsync) else client._client + except AttributeError: + raise ValueError("Poller client parameter must be a low-level msrest Service Client or a SDK client.") + response = initial_response.response if isinstance(initial_response, ClientRawResponse) else initial_response + + if isinstance(deserialization_callback, type) and issubclass(deserialization_callback, Model): + deserialization_callback = deserialization_callback.deserialize + + # Might raise a CloudError + polling_method.initialize(client, response, deserialization_callback) + + await polling_method.run() + return polling_method.resource() diff --git a/.venv/lib/python3.12/site-packages/msrest/polling/poller.py b/.venv/lib/python3.12/site-packages/msrest/polling/poller.py new file mode 100644 index 00000000..2c3e6773 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/polling/poller.py @@ -0,0 +1,237 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +import threading +import time +import uuid +try: + from urlparse import urlparse +except ImportError: + from urllib.parse import urlparse + +from typing import Any, Callable, Union, List, Optional, TYPE_CHECKING + +if TYPE_CHECKING: + import requests + +from ..serialization import Model +from ..service_client import ServiceClient +from ..pipeline import ClientRawResponse + +class PollingMethod(object): + """ABC class for polling method. 
+ """ + def initialize(self, client, initial_response, deserialization_callback): + # type: (Any, Any, Any) -> None + raise NotImplementedError("This method needs to be implemented") + + def run(self): + # type: () -> None + raise NotImplementedError("This method needs to be implemented") + + def status(self): + # type: () -> str + raise NotImplementedError("This method needs to be implemented") + + def finished(self): + # type: () -> bool + raise NotImplementedError("This method needs to be implemented") + + def resource(self): + # type: () -> Any + raise NotImplementedError("This method needs to be implemented") + +class NoPolling(PollingMethod): + """An empty poller that returns the deserialized initial response. + """ + def __init__(self): + self._initial_response = None + self._deserialization_callback = None + + def initialize(self, _, initial_response, deserialization_callback): + # type: (Any, requests.Response, Callable) -> None + self._initial_response = initial_response + self._deserialization_callback = deserialization_callback + + def run(self): + # type: () -> None + """Empty run, no polling. + """ + pass + + def status(self): + # type: () -> str + """Return the current status as a string. + :rtype: str + """ + return "succeeded" + + def finished(self): + # type: () -> bool + """Is this polling finished? + :rtype: bool + """ + return True + + def resource(self): + # type: () -> Any + return self._deserialization_callback(self._initial_response) + + +class LROPoller(object): + """Poller for long running operations. + + :param client: A msrest service client. Can be a SDK client and it will be casted to a ServiceClient. + :type client: msrest.service_client.ServiceClient + :param initial_response: The initial call response + :type initial_response: requests.Response or msrest.pipeline.ClientRawResponse + :param deserialization_callback: A callback that takes a Response and return a deserialized object. If a subclass of Model is given, this passes "deserialize" as callback. 
+ :type deserialization_callback: callable or msrest.serialization.Model + :param polling_method: The polling strategy to adopt + :type polling_method: msrest.polling.PollingMethod + """ + + def __init__(self, client, initial_response, deserialization_callback, polling_method): + # type: (Any, Union[ClientRawResponse, requests.Response], Union[Model, Callable[[requests.Response], Model]], PollingMethod) -> None + try: + self._client = client if isinstance(client, ServiceClient) else client._client # type: ServiceClient + except AttributeError: + raise ValueError("Poller client parameter must be a low-level msrest Service Client or a SDK client.") + self._response = initial_response.response if isinstance(initial_response, ClientRawResponse) else initial_response + self._callbacks = [] # type: List[Callable] + self._polling_method = polling_method + + if isinstance(deserialization_callback, type) and issubclass(deserialization_callback, Model): + deserialization_callback = deserialization_callback.deserialize # type: ignore + + # Might raise a CloudError + self._polling_method.initialize(self._client, self._response, deserialization_callback) + + # Prepare thread execution + self._thread = None + self._done = None + self._exception = None + if not self._polling_method.finished(): + self._done = threading.Event() + self._thread = threading.Thread( + target=self._start, + name="LROPoller({})".format(uuid.uuid4())) + self._thread.daemon = True + self._thread.start() + + def _start(self): + """Start the long running operation. + On completion, runs any callbacks. + + :param callable update_cmd: The API request to check the status of + the operation. + """ + try: + self._polling_method.run() + except Exception as err: + self._exception = err + + finally: + self._done.set() + + callbacks, self._callbacks = self._callbacks, [] + while callbacks: + for call in callbacks: + call(self._polling_method) + callbacks, self._callbacks = self._callbacks, [] + + def status(self): + # type: () -> str + """Returns the current status string. + + :returns: The current status string + :rtype: str + """ + return self._polling_method.status() + + def result(self, timeout=None): + # type: (Optional[float]) -> Model + """Return the result of the long running operation, or + the result available after the specified timeout. + + :returns: The deserialized resource of the long running operation, + if one is available. + :raises CloudError: Server problem with the query. + """ + self.wait(timeout) + return self._polling_method.resource() + + def wait(self, timeout=None): + # type: (Optional[float]) -> None + """Wait on the long running operation for a specified length + of time. You can check if this call as ended with timeout with the + "done()" method. + + :param float timeout: Period of time to wait for the long running + operation to complete (in seconds). + :raises CloudError: Server problem with the query. + """ + if self._thread is None: + return + self._thread.join(timeout=timeout) + try: + # Let's handle possible None in forgiveness here + raise self._exception # type: ignore + except TypeError: # Was None + pass + + def done(self): + # type: () -> bool + """Check status of the long running operation. + + :returns: 'True' if the process has completed, else 'False'. 
+ """ + return self._thread is None or not self._thread.is_alive() + + def add_done_callback(self, func): + # type: (Callable) -> None + """Add callback function to be run once the long running operation + has completed - regardless of the status of the operation. + + :param callable func: Callback function that takes at least one + argument, a completed LongRunningOperation. + """ + # Still use "_done" and not "done", since CBs are executed inside the thread. + if self._done is None or self._done.is_set(): + func(self._polling_method) + # Let's add them still, for consistency (if you wish to access to it for some reasons) + self._callbacks.append(func) + + def remove_done_callback(self, func): + # type: (Callable) -> None + """Remove a callback from the long running operation. + + :param callable func: The function to be removed from the callbacks. + :raises: ValueError if the long running operation has already + completed. + """ + if self._done is None or self._done.is_set(): + raise ValueError("Process is complete.") + self._callbacks = [c for c in self._callbacks if c != func] diff --git a/.venv/lib/python3.12/site-packages/msrest/py.typed b/.venv/lib/python3.12/site-packages/msrest/py.typed new file mode 100644 index 00000000..e5aff4f8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561.
\ No newline at end of file diff --git a/.venv/lib/python3.12/site-packages/msrest/serialization.py b/.venv/lib/python3.12/site-packages/msrest/serialization.py new file mode 100644 index 00000000..64cb2b78 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/serialization.py @@ -0,0 +1,1994 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +from base64 import b64decode, b64encode +import calendar +import datetime +import decimal +import email +from enum import Enum +import json +import logging +import re +import sys +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote # type: ignore +import xml.etree.ElementTree as ET + +import isodate + +from typing import Dict, Any + +from .exceptions import ( + ValidationError, + SerializationError, + DeserializationError, + raise_with_traceback) + +try: + basestring # type: ignore + unicode_str = unicode # type: ignore +except NameError: + basestring = str # type: ignore + unicode_str = str # type: ignore + +_LOGGER = logging.getLogger(__name__) + +try: + _long_type = long # type: ignore +except NameError: + _long_type = int + +class UTC(datetime.tzinfo): + """Time Zone info for handling UTC""" + + def utcoffset(self, dt): + """UTF offset for UTC is 0.""" + return datetime.timedelta(0) + + def tzname(self, dt): + """Timestamp representation.""" + return "Z" + + def dst(self, dt): + """No daylight saving for UTC.""" + return datetime.timedelta(hours=1) + +try: + from datetime import timezone as _FixedOffset +except ImportError: # Python 2.7 + class _FixedOffset(datetime.tzinfo): # type: ignore + """Fixed offset in minutes east from UTC. 
+ Copy/pasted from Python doc + :param datetime.timedelta offset: offset in timedelta format + """ + + def __init__(self, offset): + self.__offset = offset + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return str(self.__offset.total_seconds()/3600) + + def __repr__(self): + return "<FixedOffset {}>".format(self.tzname(None)) + + def dst(self, dt): + return datetime.timedelta(0) + + def __getinitargs__(self): + return (self.__offset,) + +try: + from datetime import timezone + TZ_UTC = timezone.utc # type: ignore +except ImportError: + TZ_UTC = UTC() # type: ignore + +_FLATTEN = re.compile(r"(?<!\\)\.") + +def attribute_transformer(key, attr_desc, value): + """A key transformer that returns the Python attribute. + + :param str key: The attribute name + :param dict attr_desc: The attribute metadata + :param object value: The value + :returns: A key using attribute name + """ + return (key, value) + +def full_restapi_key_transformer(key, attr_desc, value): + """A key transformer that returns the full RestAPI key path. + + :param str _: The attribute name + :param dict attr_desc: The attribute metadata + :param object value: The value + :returns: A list of keys using RestAPI syntax. + """ + keys = _FLATTEN.split(attr_desc['key']) + return ([_decode_attribute_map_key(k) for k in keys], value) + +def last_restapi_key_transformer(key, attr_desc, value): + """A key transformer that returns the last RestAPI key. + + :param str key: The attribute name + :param dict attr_desc: The attribute metadata + :param object value: The value + :returns: The last RestAPI key. + """ + key, value = full_restapi_key_transformer(key, attr_desc, value) + return (key[-1], value) + +def _recursive_validate(attr_name, attr_type, data): + result = [] + if attr_type.startswith('[') and data is not None: + for content in data: + result += _recursive_validate(attr_name+" content", attr_type[1:-1], content) + elif attr_type.startswith('{') and data is not None: + if not hasattr(data, 'values'): + raise ValidationError("type", attr_name, attr_type) + for content in data.values(): + result += _recursive_validate(attr_name+" content", attr_type[1:-1], content) + elif hasattr(data, '_validation'): + return data.validate() + return result + +def _create_xml_node(tag, prefix=None, ns=None): + """Create a XML node.""" + if prefix and ns: + ET.register_namespace(prefix, ns) + if ns: + return ET.Element("{"+ns+"}"+tag) + else: + return ET.Element(tag) + +class Model(object): + """Mixin for all client request body/response body models to support + serialization and deserialization. 
+ """ + + _subtype_map = {} # type: Dict[str, Dict[str, Any]] + _attribute_map = {} # type: Dict[str, Dict[str, Any]] + _validation = {} # type: Dict[str, Dict[str, Any]] + + def __init__(self, **kwargs): + self.additional_properties = {} + for k in kwargs: + if k not in self._attribute_map: + _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) + elif k in self._validation and self._validation[k].get("readonly", False): + _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__) + else: + setattr(self, k, kwargs[k]) + + def __eq__(self, other): + """Compare objects by comparing all attributes.""" + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + def __ne__(self, other): + """Compare objects by comparing all attributes.""" + return not self.__eq__(other) + + def __str__(self): + return str(self.__dict__) + + @classmethod + def enable_additional_properties_sending(cls): + cls._attribute_map['additional_properties'] = {'key': '', 'type': '{object}'} + + @classmethod + def is_xml_model(cls): + try: + cls._xml_map + except AttributeError: + return False + return True + + @classmethod + def _create_xml_node(cls): + """Create XML node. + """ + try: + xml_map = cls._xml_map + except AttributeError: + xml_map = {} + + return _create_xml_node( + xml_map.get('name', cls.__name__), + xml_map.get("prefix", None), + xml_map.get("ns", None) + ) + + def validate(self): + """Validate this model recursively and return a list of ValidationError. + + :returns: A list of validation error + :rtype: list + """ + validation_result = [] + for attr_name, value in [(attr, getattr(self, attr)) for attr in self._attribute_map]: + attr_desc = self._attribute_map[attr_name] + if attr_name == "additional_properties" and attr_desc["key"] == '': + # Do NOT validate additional_properties + continue + attr_type = attr_desc['type'] + + try: + debug_name = "{}.{}".format(self.__class__.__name__, attr_name) + # https://github.com/Azure/msrest-for-python/issues/85 + if value is not None and attr_type in Serializer.basic_types.values(): + value = Serializer.serialize_basic(value, attr_type) + Serializer.validate(value, debug_name, **self._validation.get(attr_name, {})) + except ValidationError as validation_error: + validation_result.append(validation_error) + + validation_result += _recursive_validate(attr_name, attr_type, value) + return validation_result + + def serialize(self, keep_readonly=False, **kwargs): + """Return the JSON that would be sent to azure from this model. + + This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) + + def as_dict(self, keep_readonly=True, key_transformer=attribute_transformer, **kwargs): + """Return a dict that can be JSONify using json.dump. + + Advanced usage might optionally use a callback as parameter: + + .. code::python + + def my_key_transformer(key, attr_desc, value): + return key + + Key is the attribute name used in Python. Attr_desc + is a dict of metadata. Currently contains 'type' with the + msrest type and 'key' with the RestAPI encoded key. + Value is the current value in this object. 
+ + The string returned will be used to serialize the key. + If the return type is a list, this is considered hierarchical + result dict. + + See the three examples in this file: + + - attribute_transformer + - full_restapi_key_transformer + - last_restapi_key_transformer + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param function key_transformer: A key transformer function. + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) + + @classmethod + def _infer_class_models(cls): + try: + str_models = cls.__module__.rsplit('.', 1)[0] + models = sys.modules[str_models] + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + if cls.__name__ not in client_models: + raise ValueError("Not Autorest generated code") + except Exception: + # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. + client_models = {cls.__name__: cls} + return client_models + + @classmethod + def deserialize(cls, data, content_type=None): + """Parse a str using the RestAPI syntax and return a model. + + :param str data: A str using RestAPI structure. JSON by default. + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises: DeserializationError if something went wrong + """ + deserializer = Deserializer(cls._infer_class_models()) + return deserializer(cls.__name__, data, content_type=content_type) + + @classmethod + def from_dict(cls, data, key_extractors=None, content_type=None): + """Parse a dict using given key extractor return a model. + + By default consider key + extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor + and last_rest_key_case_insensitive_extractor) + + :param dict data: A dict using RestAPI structure + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises: DeserializationError if something went wrong + """ + deserializer = Deserializer(cls._infer_class_models()) + deserializer.key_extractors = [ + attribute_key_case_insensitive_extractor, + rest_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor + ] if key_extractors is None else key_extractors + return deserializer(cls.__name__, data, content_type=content_type) + + @classmethod + def _flatten_subtype(cls, key, objects): + if '_subtype_map' not in cls.__dict__: + return {} + result = dict(cls._subtype_map[key]) + for valuetype in cls._subtype_map[key].values(): + result.update(objects[valuetype]._flatten_subtype(key, objects)) + return result + + @classmethod + def _classify(cls, response, objects): + """Check the class _subtype_map for any child classes. + We want to ignore any inherited _subtype_maps. + Remove the polymorphic key from the initial data. + """ + for subtype_key in cls.__dict__.get('_subtype_map', {}).keys(): + subtype_value = None + + if not isinstance(response, ET.Element): + rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] + subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None) + else: + subtype_value = xml_key_extractor( + subtype_key, + cls._attribute_map[subtype_key], + response + ) + if subtype_value: + # Try to match base class. 
Can be class name only + # (bug to fix in Autorest to support x-ms-discriminator-name) + if cls.__name__ == subtype_value: + return cls + flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) + try: + return objects[flatten_mapping_type[subtype_value]] + except KeyError: + _LOGGER.warning( + "Subtype value %s has no mapping, use base class %s.", + subtype_value, + cls.__name__, + ) + break + else: + _LOGGER.warning( + "Discriminator %s is absent or null, use base class %s.", + subtype_key, + cls.__name__ + ) + break + return cls + + @classmethod + def _get_rest_key_parts(cls, attr_key): + """Get the RestAPI key of this attr, split it and decode part + :param str attr_key: Attribute key must be in attribute_map. + :returns: A list of RestAPI part + :rtype: list + """ + rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]['key']) + return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] + + +def _decode_attribute_map_key(key): + """This decode a key in an _attribute_map to the actual key we want to look at + inside the received data. + + :param str key: A key string from the generated code + """ + return key.replace('\\.', '.') + + +class Serializer(object): + """Request object model serializer.""" + + basic_types = {str: 'str', int: 'int', bool: 'bool', float: 'float'} + + _xml_basic_types_serializers = {'bool': lambda x:str(x).lower()} + days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", + 4: "Fri", 5: "Sat", 6: "Sun"} + months = {1: "Jan", 2: "Feb", 3: "Mar", 4: "Apr", 5: "May", 6: "Jun", + 7: "Jul", 8: "Aug", 9: "Sep", 10: "Oct", 11: "Nov", 12: "Dec"} + validation = { + "min_length": lambda x, y: len(x) < y, + "max_length": lambda x, y: len(x) > y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0 + } + + def __init__(self, classes=None): + self.serialize_type = { + 'iso-8601': Serializer.serialize_iso, + 'rfc-1123': Serializer.serialize_rfc, + 'unix-time': Serializer.serialize_unix, + 'duration': Serializer.serialize_duration, + 'date': Serializer.serialize_date, + 'time': Serializer.serialize_time, + 'decimal': Serializer.serialize_decimal, + 'long': Serializer.serialize_long, + 'bytearray': Serializer.serialize_bytearray, + 'base64': Serializer.serialize_base64, + 'object': self.serialize_object, + '[]': self.serialize_iter, + '{}': self.serialize_dict + } + self.dependencies = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + def _serialize(self, target_obj, data_type=None, **kwargs): + """Serialize data into a string according to type. + + :param target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises: SerializationError if serialization fails. 
+ """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data( + target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data( + target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() + try: + attributes = target_obj._attribute_map + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get(attr_name, {}).get('readonly', False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == '': + if target_obj.additional_properties is not None: + serialized.update(target_obj.additional_properties) + continue + try: + ### Extract sub-data to serialize from model ### + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + ### Serialize this data ### + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc['type'], **kwargs) + + ### Incorporate this data in the right place ### + if is_xml_model_serialization: + xml_desc = attr_desc.get('xml', {}) + xml_name = xml_desc.get('name', attr_desc['key']) + xml_prefix = xml_desc.get('prefix', None) + xml_ns = xml_desc.get('ns', None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) + continue + if xml_desc.get("text", False): + serialized.text = new_attr + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces. 
+ if 'name' not in getattr(orig_attr, '_xml_map', {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node( + xml_name, + xml_prefix, + xml_ns + ) + local_node.text = unicode_str(new_attr) + serialized.append(local_node) + else: # JSON + for k in reversed(keys): + unflattened = {k: new_attr} + new_attr = unflattened + + _new_attr = new_attr + _serialized = serialized + for k in keys: + if k not in _serialized: + _serialized.update(_new_attr) + _new_attr = _new_attr[k] + _serialized = _serialized[k] + except ValueError as err: + if isinstance(err, SerializationError): + raise err + continue + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format( + attr_name, class_name, str(target_obj)) + raise_with_traceback(SerializationError, msg, err) + else: + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises: SerializationError if serialization fails. + :raises: ValueError if data is None + """ + if data is None: + raise ValidationError("required", "body", True) + + # Just in case this is a dict + internal_data_type = data_type.strip('[]{}') + internal_data_type = self.dependencies.get(internal_data_type, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor + ] + data = deserializer._deserialize(data_type, data) + except DeserializationError as err: + raise_with_traceback( + SerializationError, "Unable to build a model: "+str(err), err) + + if self.client_side_validation: + errors = _recursive_validate(data_type, data_type, data) + if errors: + raise errors[0] + return self._serialize(data, data_type, **kwargs) + + def _http_component_validation(self, data, data_type, name, **kwargs): + if self.client_side_validation: + # https://github.com/Azure/msrest-for-python/issues/85 + if data is not None and data_type in self.basic_types.values(): + data = self.serialize_basic(data, data_type, **kwargs) + data = self.validate(data, name, required=True, **kwargs) + return data + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. 
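
As a rough illustration of what body() and _serialize() produce together, here is a hypothetical model (the "Pet" class below is not part of msrest; generated SDK models have exactly this shape) whose flattened "properties.age" key is rebuilt into nested JSON by the incorporate step above:

    from msrest.serialization import Model, Serializer

    class Pet(Model):
        _attribute_map = {
            'name': {'key': 'name', 'type': 'str'},
            'age': {'key': 'properties.age', 'type': 'int'},  # flattened JSON path
        }

    serializer = Serializer({'Pet': Pet})
    body = serializer.body(Pet(name='Rex', age=3), 'Pet')
    print(body)  # {'name': 'Rex', 'properties': {'age': 3}}
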
+ :raises: ValueError if data is None + """ + data = self._http_component_validation(data, data_type, name, **kwargs) + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == 'bool': + output = json.dumps(output) + + if kwargs.get('skip_quote') is True: + output = str(output) + else: + output = quote(str(output), safe='') + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + data = self._http_component_validation(data, data_type, name, **kwargs) + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + data = [ + self.serialize_data(d, internal_data_type, **kwargs) if d is not None else "" + for d + in data + ] + if not kwargs.get('skip_quote', False): + data = [ + quote(str(d), safe='') + for d + in data + ] + return str(self.serialize_iter(data, internal_data_type, **kwargs)) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == 'bool': + output = json.dumps(output) + if kwargs.get('skip_quote') is True: + output = str(output) + else: + output = quote(str(output), safe='') + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + data = self._http_component_validation(data, data_type, name, **kwargs) + try: + if data_type in ['[str]']: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == 'bool': + output = json.dumps(output) + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return str(output) + + @classmethod + def validate(cls, data, name, **kwargs): + """Validate that a piece of data meets certain conditions""" + required = kwargs.get('required', False) + if required and data is None: + raise ValidationError("required", name, True) + elif data is None: + return + elif kwargs.get('readonly'): + return + + try: + for key, value in kwargs.items(): + validator = cls.validation.get(key, lambda x, y: False) + if validator(data, value): + raise ValidationError(key, name, value) + except TypeError: + raise ValidationError("unknown", name, "unknown") + else: + return data + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :param bool required: Whether it's essential that the data not be + empty or None + :raises: AttributeError if required data is None. + :raises: ValueError if data is None + :raises: SerializationError if serialization fails. 
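
The three HTTP-component helpers above differ mainly in quoting: url() and query() percent-encode unless skip_quote is passed, query() additionally understands list types joined with a divider, and header() JSON-encodes booleans so they render as lowercase true/false. A short sketch (the parameter names are illustrative):

    from msrest.serialization import Serializer

    serializer = Serializer()

    print(serializer.url('resource_name', 'my folder/a', 'str'))        # my%20folder%2Fa
    print(serializer.query('tags', ['a', 'b', 'c'], '[str]', div=','))  # a,b,c
    print(serializer.header('x-ms-flag', True, 'bool'))                 # true
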
+ """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + elif data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, data.__class__) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type]( + data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + if (isinstance(err, SerializationError)): + raise err # don't rewrap as SerializationError + msg = "Unable to serialize value: {!r} as type: {!r}." + raise_with_traceback( + SerializationError, msg.format(data, data_type), err) + else: + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builting data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param data: Object to be serialized. + :param str data_type: Type of object in the iterable. + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == 'str': + return cls.serialize_unicode(data) + return eval(data_type)(data) + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param data: Object to be serialized. + :rtype: str + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + else: + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. + + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. + - is_xml bool : If set, serialize as XML + + :param list attr: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param bool required: Whether the objects in the iterable must + not be None or empty. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. 
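
serialize_basic above looks up an optional custom serializer first, either the caller-supplied basic_types_serializers mapping or, under is_xml, the class-level table that lowercases booleans, before falling back to the plain Python constructor. For instance:

    from msrest.serialization import Serializer

    serializer = Serializer()

    print(serializer.serialize_data(True, 'bool'))                # True  (plain Python repr)
    print(serializer.serialize_data(True, 'bool', is_xml=True))   # true  (XML schema literal)
    print(serializer.serialize_data(
        3.5, 'float', basic_types_serializers={'float': lambda f: '%.2f' % f}))  # 3.50
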
+ :rtype: list, str + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError as err: + if isinstance(err, SerializationError): + raise err + serialized.append(None) + + if div: + serialized = ['' if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if 'xml' in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get('xml', {}) + xml_name = xml_desc.get('name') + if not xml_name: + xml_name = serialization_ctxt['key'] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node( + xml_name, + xml_desc.get('prefix', None), + xml_desc.get('ns', None) + ) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node( + node_name, + xml_desc.get('prefix', None), + xml_desc.get('ns', None) + ) + if el is not None: # Otherwise it writes "None" :-p + el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :param bool required: Whether the objects in the dictionary must + not be None or empty. + :rtype: dict + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data( + value, dict_type, **kwargs) + except ValueError as err: + if isinstance(err, SerializationError): + raise err + serialized[self.serialize_unicode(key)] = None + + if 'xml' in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt['xml'] + xml_name = xml_desc['name'] + + final_result = _create_xml_node( + xml_name, + xml_desc.get('prefix', None), + xml_desc.get('ns', None) + ) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
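
On the JSON side, serialize_iter and serialize_dict above simply recurse into serialize_data per element, with div collapsing a list into a delimited string (the XML branches build ElementTree nodes instead). For example:

    from msrest.serialization import Serializer

    serializer = Serializer()

    print(serializer.serialize_iter([1, 2, 3], 'int', div=','))         # 1,2,3
    print(serializer.serialize_iter(['a', None, 'b'], 'str'))           # ['a', None, 'b']
    print(serializer.serialize_dict({'start': '2023-01-02'}, 'date'))   # {'start': '2023-01-02'}
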
+ :rtype: dict or str + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is unicode_str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + elif obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object( + value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object( + obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) + return result + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) + + @staticmethod + def serialize_bytearray(attr, **kwargs): + """Serialize bytearray into base-64 string. + + :param attr: Object to be serialized. + :rtype: str + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): + """Serialize str into base-64 string. + + :param attr: Object to be serialized. + :rtype: str + """ + encoded = b64encode(attr).decode('ascii') + return encoded.strip('=').replace('+', '-').replace('/', '_') + + @staticmethod + def serialize_decimal(attr, **kwargs): + """Serialize Decimal object to float. + + :param attr: Object to be serialized. + :rtype: float + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): + """Serialize long (Py2) or int (Py3). + + :param attr: Object to be serialized. + :rtype: int/long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. + :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): + """Serialize TimeDelta object into ISO-8601 formatted string. + + :param TimeDelta attr: Object to be serialized. 
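
serialize_object dispatches on the runtime type of untyped ('object') values and recurses into dicts and lists, while serialize_base64 produces the URL-safe, unpadded encoding used for the 'base64' data type above. A quick sketch:

    import datetime
    from msrest.serialization import Serializer

    serializer = Serializer()

    mixed = {'when': datetime.date(2023, 1, 2), 'count': 3, 'tags': ['a', 'b']}
    print(serializer.serialize_object(mixed))
    # {'when': '2023-01-02', 'count': 3, 'tags': ['a', 'b']}

    print(Serializer.serialize_base64(b'msrest?'))  # bXNyZXN0Pw  (no padding, '-'/'_' alphabet)
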
+ :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_duration(attr) + return isodate.duration_isoformat(attr) + + @staticmethod + def serialize_rfc(attr, **kwargs): + """Serialize Datetime object into RFC-1123 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises: TypeError if format invalid. + """ + try: + if not attr.tzinfo: + _LOGGER.warning( + "Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + except AttributeError: + raise TypeError("RFC1123 object must be valid Datetime object.") + + return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( + Serializer.days[utc.tm_wday], utc.tm_mday, + Serializer.months[utc.tm_mon], utc.tm_year, + utc.tm_hour, utc.tm_min, utc.tm_sec) + + @staticmethod + def serialize_iso(attr, **kwargs): + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises: SerializationError if format invalid. + """ + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + if not attr.tzinfo: + _LOGGER.warning( + "Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + microseconds = str(attr.microsecond).rjust(6,'0').rstrip('0').ljust(3, '0') + if microseconds: + microseconds = '.'+microseconds + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, + utc.tm_hour, utc.tm_min, utc.tm_sec) + return date + microseconds + 'Z' + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." + raise_with_traceback(SerializationError, msg, err) + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise_with_traceback(TypeError, msg, err) + + @staticmethod + def serialize_unix(attr, **kwargs): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param Datetime attr: Object to be serialized. + :rtype: int + :raises: SerializationError if format invalid + """ + if isinstance(attr, int): + return attr + try: + if not attr.tzinfo: + _LOGGER.warning( + "Datetime with no tzinfo will be considered UTC.") + return int(calendar.timegm(attr.utctimetuple())) + except AttributeError: + raise TypeError("Unix time object must be valid Datetime object.") + +def rest_key_extractor(attr, attr_desc, data): + key = attr_desc['key'] + working_data = data + + while '.' in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = working_data.get(working_key, data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + # https://github.com/Azure/msrest-for-python/issues/197 + return None + key = '.'.join(dict_keys[1:]) + + return working_data.get(key) + +def rest_key_case_insensitive_extractor(attr, attr_desc, data): + key = attr_desc['key'] + working_data = data + + while '.' 
in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + # https://github.com/Azure/msrest-for-python/issues/197 + return None + key = '.'.join(dict_keys[1:]) + + if working_data: + return attribute_key_case_insensitive_extractor(key, None, working_data) + +def last_rest_key_extractor(attr, attr_desc, data): + """Extract the attribute in "data" based on the last part of the JSON path key. + """ + key = attr_desc['key'] + dict_keys = _FLATTEN.split(key) + return attribute_key_extractor(dict_keys[-1], None, data) + +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): + """Extract the attribute in "data" based on the last part of the JSON path key. + + This is the case insensitive version of "last_rest_key_extractor" + """ + key = attr_desc['key'] + dict_keys = _FLATTEN.split(key) + return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) + +def attribute_key_extractor(attr, _, data): + return data.get(attr) + +def attribute_key_case_insensitive_extractor(attr, _, data): + found_key = None + lower_attr = attr.lower() + for key in data: + if lower_attr == key.lower(): + found_key = key + break + + return data.get(found_key) + +def _extract_name_from_internal_type(internal_type): + """Given an internal type XML description, extract correct XML name with namespace. + + :param dict internal_type: An model type + :rtype: tuple + :returns: A tuple XML name + namespace dict + """ + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + xml_name = internal_type_xml_map.get('name', internal_type.__name__) + xml_ns = internal_type_xml_map.get("ns", None) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + return xml_name + + +def xml_key_extractor(attr, attr_desc, data): + if isinstance(data, dict): + return None + + # Test if this model is XML ready first + if not isinstance(data, ET.Element): + return None + + xml_desc = attr_desc.get('xml', {}) + xml_name = xml_desc.get('name', attr_desc['key']) + + # Look for a children + is_iter_type = attr_desc['type'].startswith("[") + is_wrapped = xml_desc.get("wrapped", False) + internal_type = attr_desc.get("internalType", None) + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + + # Integrate namespace if necessary + xml_ns = xml_desc.get('ns', internal_type_xml_map.get("ns", None)) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + + # If it's an attribute, that's simple + if xml_desc.get("attr", False): + return data.get(xml_name) + + # If it's x-ms-text, that's simple too + if xml_desc.get("text", False): + return data.text + + # Scenario where I take the local name: + # - Wrapped node + # - Internal type is an enum (considered basic types) + # - Internal type has no XML/Name node + if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or 'name' not in internal_type_xml_map)): + children = data.findall(xml_name) + # If internal type has a local name and it's not a list, I use that name + elif not is_iter_type and internal_type and 'name' in internal_type_xml_map: + xml_name = _extract_name_from_internal_type(internal_type) + children = data.findall(xml_name) + # That's an array + else: + if 
internal_type: # Complex type, ignore itemsName and use the complex type name + items_name = _extract_name_from_internal_type(internal_type) + else: + items_name = xml_desc.get("itemsName", xml_name) + children = data.findall(items_name) + + if len(children) == 0: + if is_iter_type: + if is_wrapped: + return None # is_wrapped no node, we want None + else: + return [] # not wrapped, assume empty list + return None # Assume it's not there, maybe an optional node. + + # If is_iter_type and not wrapped, return all found children + if is_iter_type: + if not is_wrapped: + return children + else: # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( + xml_name + )) + return list(children[0]) # Might be empty list and that's ok. + + # Here it's not a itertype, we should have found one element only or empty + if len(children) > 1: + raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) + return children[0] + +class Deserializer(object): + """Response object model deserializer. + + :param dict classes: Class type dictionary for deserializing complex types. + :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. + """ + + basic_types = {str: 'str', int: 'int', bool: 'bool', float: 'float'} + + valid_date = re.compile( + r'\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}' + r'\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?') + + def __init__(self, classes=None): + self.deserialize_type = { + 'iso-8601': Deserializer.deserialize_iso, + 'rfc-1123': Deserializer.deserialize_rfc, + 'unix-time': Deserializer.deserialize_unix, + 'duration': Deserializer.deserialize_duration, + 'date': Deserializer.deserialize_date, + 'time': Deserializer.deserialize_time, + 'decimal': Deserializer.deserialize_decimal, + 'long': Deserializer.deserialize_long, + 'bytearray': Deserializer.deserialize_bytearray, + 'base64': Deserializer.deserialize_base64, + 'object': self.deserialize_object, + '[]': self.deserialize_iter, + '{}': self.deserialize_dict + } + self.deserialize_expected_types = { + 'duration': (isodate.Duration, datetime.timedelta), + 'iso-8601': (datetime.datetime) + } + self.dependencies = dict(classes) if classes else {} + self.key_extractors = [ + rest_key_extractor, + xml_key_extractor + ] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. + :param str content_type: Swagger "produces" if available. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): + """Call the deserializer on a model. 
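
The Deserializer's default key_extractors list above is tried in order for every attribute; each extractor either finds a candidate value in the raw payload or returns None. A small sketch of the two most common JSON extractors (the 'properties.provisioningState' path is illustrative, not a real model):

    from msrest.serialization import (
        rest_key_extractor,
        last_rest_key_case_insensitive_extractor,
    )

    attr_desc = {'key': 'properties.provisioningState', 'type': 'str'}

    nested = {'properties': {'provisioningState': 'Succeeded'}}
    flat = {'ProvisioningState': 'Succeeded'}

    # Walks the dotted JSON path through nested objects...
    print(rest_key_extractor('provisioning_state', attr_desc, nested))                      # Succeeded
    # ...whereas the "last key" variant only matches the final segment, case-insensitively.
    print(last_rest_key_case_insensitive_extractor('provisioning_state', attr_desc, flat))  # Succeeded
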
+ + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, '_validation', {}).items() + if config.get('constant')] + try: + for attr, mapconfig in data._attribute_map.items(): + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig['type'] + internal_data_type = local_type.strip('[]{}') + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr( + data, + attr, + self._deserialize(local_type, value) + ) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, basestring): + return self.deserialize_data(data, response) + elif isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None: + return data + try: + attributes = response._attribute_map + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... + if attr == "additional_properties" and attr_desc["key"] == '': + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip('[]{}') + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ("Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" ) + _LOGGER.warning( + msg, + found_value, + key_extractor, + attr + ) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc['type']) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name + raise_with_traceback(DeserializationError, msg, err) + else: + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != '': + # Check empty string. If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = {_decode_attribute_map_key(_FLATTEN.split(desc['key'])[0]) + for desc in attribute_map.values() if desc['key'] != ''} + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. 
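
Putting the attribute loop and _build_additional_properties together: mapped keys are extracted (following flattened paths) and converted, while unknown top-level keys end up in additional_properties. A sketch reusing the hypothetical Pet model from the serialization example:

    from msrest.serialization import Model, Deserializer

    class Pet(Model):
        _attribute_map = {
            'name': {'key': 'name', 'type': 'str'},
            'age': {'key': 'properties.age', 'type': 'int'},
        }

    deserializer = Deserializer({'Pet': Pet})
    pet = deserializer('Pet', {'name': 'Rex', 'properties': {'age': 3}, 'color': 'brown'})
    print(pet.name, pet.age)          # Rex 3
    print(pet.additional_properties)  # {'color': 'brown'}
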
+ + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deseralize. + """ + if target is None: + return None, None + + if isinstance(target, basestring): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deseralize. + :param str content_type: Swagger "produces" if available. + """ + try: + return self(target_obj, data, content_type=content_type) + except: + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", + exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param raw_data: Data to be processed. + :param content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + """ + # This avoids a circular dependency. We might want to consider RawDesializer is more generic + # than the pipeline concept, and put it in a toolbox, used both here and in pipeline. TBD. + from .pipeline.universal import RawDeserializer + + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + #Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics( + raw_data.text(), + raw_data.headers + ) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, '_content_consumed'): + return RawDeserializer.deserialize_from_http_generics( + raw_data.text, + raw_data.headers + ) + + if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, 'read'): + return RawDeserializer.deserialize_from_text(raw_data, content_type) + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param response: The response model class. + :param d_attrs: The deserialized response attributes. 
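
_unpack_content above accepts several input shapes; in particular a raw JSON string plus a content type is routed through RawDeserializer, and failsafe_deserialize turns any deserialization failure into None (with a logged warning) so error-path parsing never masks the original HTTP error. A sketch with a hypothetical ErrorInfo model:

    from msrest.serialization import Model, Deserializer

    class ErrorInfo(Model):
        _attribute_map = {'code': {'key': 'code', 'type': 'str'}}

    deserializer = Deserializer({'ErrorInfo': ErrorInfo})

    err = deserializer('ErrorInfo', '{"code": "NotFound"}', content_type='application/json')
    print(err.code)  # NotFound

    # Invalid JSON: a warning is logged and None is returned instead of raising.
    print(deserializer.failsafe_deserialize(
        'ErrorInfo', '<<not json>>', content_type='application/json'))  # None
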
+ """ + if callable(response): + subtype = getattr(response, '_subtype_map', {}) + try: + readonly = [k for k, v in response._validation.items() + if v.get('readonly')] + const = [k for k, v in response._validation.items() + if v.get('constant')] + kwargs = {k: v for k, v in attrs.items() + if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format( + kwargs, response) + raise DeserializationError(msg + str(err)) + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) + + def deserialize_data(self, data, data_type): + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise_with_traceback(DeserializationError, msg, err) + else: + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format( + iter_type, + type(attr) + )) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. + :param str dict_type: The object type of the items in the dictionary. 
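
The '[...]' and '{...}' markers in a type string route deserialize_data to deserialize_iter and deserialize_dict respectively, which recurse per element. For example:

    from msrest.serialization import Deserializer

    deserializer = Deserializer()

    print(deserializer.deserialize_data(['1', '2', '3'], '[int]'))
    # [1, 2, 3]
    print(deserializer.deserialize_data({'start': '2023-01-02'}, '{date}'))
    # {'start': datetime.date(2023, 1, 2)}
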
+ :rtype: dict + """ + if isinstance(attr, list): + return {x['key']: self.deserialize_data(x['value'], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform <Key>value</Key> into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :rtype: dict + :raises: TypeError if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, basestring): + return self.deserialize_basic(attr, 'str') + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object( + value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object( + obj, **kwargs)) + except ValueError: + pass + return deserialized + + else: + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. + + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :rtype: str, int, float or bool + :raises: TypeError if string format is not valid. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node <a/> is empty string. + return '' + else: + # None or '', node <a/> with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == 'bool': + if attr in [True, False, 1, 0]: + return bool(attr) + elif isinstance(attr, basestring): + if attr.lower() in ['true', '1']: + return True + elif attr.lower() in ['false', '0']: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == 'str': + return self.deserialize_unicode(attr) + return eval(data_type)(attr) + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): + return data + except NameError: + return str(data) + else: + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. + + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. 
If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + # https://github.com/Azure/azure-rest-api-specs/issues/141 + try: + return list(enum_obj.__members__.values())[data] + except IndexError: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = '=' * (3 - (len(attr) + 3) % 4) + attr = attr + padding + encoded = attr.replace('-', '+').replace('_', '/') + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. + :rtype: Decimal + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(attr) + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise_with_traceback(DeserializationError, msg, err) + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :rtype: long or int + :raises: ValueError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. + :rtype: TimeDelta + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + duration = isodate.parse_duration(attr) + except(ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize duration object." + raise_with_traceback(DeserializationError, msg, err) + else: + return duration + + @staticmethod + def deserialize_date(attr): + """Deserialize ISO-8601 formatted string into Date object. + + :param str attr: response string to be deserialized. + :rtype: Date + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + # This must NOT use defaultmonth/defaultday. 
Using None ensure this raises an exception. + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) + + @staticmethod + def deserialize_time(attr): + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :rtype: datetime.time + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + return isodate.parse_time(attr) + + @staticmethod + def deserialize_rfc(attr): + """Deserialize RFC-1123 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: Datetime + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + parsed_date = email.utils.parsedate_tz(attr) + date_obj = datetime.datetime( + *parsed_date[:6], + tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0)/60)) + ) + if not date_obj.tzinfo: + date_obj = date_obj.astimezone(tz=TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to rfc datetime object." + raise_with_traceback(DeserializationError, msg, err) + else: + return date_obj + + @staticmethod + def deserialize_iso(attr): + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: Datetime + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + attr = attr.upper() + match = Deserializer.valid_date.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split('.') + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + except(ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize datetime object." + raise_with_traceback(DeserializationError, msg, err) + else: + return date_obj + + @staticmethod + def deserialize_unix(attr): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param int attr: Object to be serialized. + :rtype: Datetime + :raises: DeserializationError if format invalid + """ + if isinstance(attr, ET.Element): + attr = int(attr.text) + try: + date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to unix datetime object." + raise_with_traceback(DeserializationError, msg, err) + else: + return date_obj diff --git a/.venv/lib/python3.12/site-packages/msrest/service_client.py b/.venv/lib/python3.12/site-packages/msrest/service_client.py new file mode 100644 index 00000000..753c468c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/service_client.py @@ -0,0 +1,388 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +import logging +import os +import sys +try: + from urlparse import urljoin, urlparse +except ImportError: + from urllib.parse import urljoin, urlparse +import warnings + +from typing import List, Any, Dict, Union, IO, Tuple, Optional, Callable, Iterator, cast, TYPE_CHECKING # pylint: disable=unused-import + +from .authentication import Authentication +from .universal_http import ClientRequest, ClientResponse +from .universal_http.requests import ( + RequestsHTTPSender, +) +from .pipeline import Request, Pipeline, HTTPPolicy, SansIOHTTPPolicy +from .pipeline.requests import ( + PipelineRequestsHTTPSender, + RequestsCredentialsPolicy, + RequestsPatchSession +) +from .pipeline.universal import ( + HTTPLogger, + RawDeserializer +) + + +if TYPE_CHECKING: + from .configuration import Configuration # pylint: disable=unused-import + from .universal_http.requests import RequestsClientResponse # pylint: disable=unused-import + import requests # pylint: disable=unused-import + + +_LOGGER = logging.getLogger(__name__) + +class SDKClient(object): + """The base class of all generated SDK client. + """ + def __init__(self, creds, config): + # type: (Any, Configuration) -> None + self._client = ServiceClient(creds, config) + + def close(self): + # type: () -> None + """Close the client if keep_alive is True. + """ + self._client.close() + + def __enter__(self): + # type: () -> SDKClient + self._client.__enter__() + return self + + def __exit__(self, *exc_details): + self._client.__exit__(*exc_details) + + +class _ServiceClientCore(object): + """Service client core methods. + + This contains methods are sans I/O and not tight to sync or async implementation. + :param Configuration config: Service configuration. + :param Authentication creds: Authenticated credentials. + """ + + def __init__(self, config): + # type: (Any, Configuration) -> None + if config is None: + raise ValueError("Config is a required parameter") + self.config = config + + def _request(self, method, url, params, headers, content, form_content): + # type: (str, str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest + """Create ClientRequest object. + + :param str url: URL for the request. + :param dict params: URL query parameters. 
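
In practice, generated SDKs drive _request above (and the verb helpers that follow) to build ClientRequest objects against config.base_url without sending anything. A minimal sketch; the endpoint and api-version value are illustrative:

    from msrest import Configuration, ServiceClient

    config = Configuration(base_url='https://example.org/api')
    client = ServiceClient(None, config)

    request = client.get('/pets/42', params={'api-version': '2023-01-01'})
    print(request.method, request.url)
    # GET https://example.org/api/pets/42?api-version=2023-01-01
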
+ :param dict headers: Headers + :param dict form_content: Form content + """ + request = ClientRequest(method, self.format_url(url)) + + if params: + request.format_parameters(params) + + if headers: + request.headers.update(headers) + # All requests should contain a Accept. + # This should be done by Autorest, but wasn't in old Autorest + # Force it for now, but might deprecate it later. + if "Accept" not in request.headers: + _LOGGER.debug("Accept header absent and forced to application/json") + request.headers['Accept'] = 'application/json' + + if content is not None: + request.add_content(content) + + if form_content: + request.add_formdata(form_content) + + return request + + def stream_upload(self, data, callback): + """Generator for streaming request body data. + + :param data: A file-like object to be streamed. + :param callback: Custom callback for monitoring progress. + """ + while True: + chunk = data.read(self.config.connection.data_block_size) + if not chunk: + break + if callback and callable(callback): + callback(chunk, response=None) + yield chunk + + def format_url(self, url, **kwargs): + # type: (str, Any) -> str + """Format request URL with the client base URL, unless the + supplied URL is already absolute. + + :param str url: The request URL to be formatted if necessary. + """ + url = url.format(**kwargs) + parsed = urlparse(url) + if not parsed.scheme or not parsed.netloc: + url = url.lstrip('/') + base = self.config.base_url.format(**kwargs).rstrip('/') + url = urljoin(base + '/', url) + return url + + def get(self, url, params=None, headers=None, content=None, form_content=None): + # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest + """Create a GET request object. + + :param str url: The request URL. + :param dict params: Request URL parameters. + :param dict headers: Headers + :param dict form_content: Form content + """ + request = self._request('GET', url, params, headers, content, form_content) + request.method = 'GET' + return request + + def put(self, url, params=None, headers=None, content=None, form_content=None): + # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest + """Create a PUT request object. + + :param str url: The request URL. + :param dict params: Request URL parameters. + :param dict headers: Headers + :param dict form_content: Form content + """ + request = self._request('PUT', url, params, headers, content, form_content) + return request + + def post(self, url, params=None, headers=None, content=None, form_content=None): + # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest + """Create a POST request object. + + :param str url: The request URL. + :param dict params: Request URL parameters. + :param dict headers: Headers + :param dict form_content: Form content + """ + request = self._request('POST', url, params, headers, content, form_content) + return request + + def head(self, url, params=None, headers=None, content=None, form_content=None): + # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest + """Create a HEAD request object. + + :param str url: The request URL. + :param dict params: Request URL parameters. 
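
format_url above only joins onto config.base_url when the supplied URL is relative; fully qualified URLs (for example a nextLink returned by a paged operation) pass through untouched. A sketch with an illustrative base URL:

    from msrest import Configuration, ServiceClient

    config = Configuration(base_url='https://example.org/api/')
    client = ServiceClient(None, config)

    print(client.format_url('/pets'))                              # https://example.org/api/pets
    print(client.format_url('https://other.example.org/v2/pets'))  # https://other.example.org/v2/pets
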
+ :param dict headers: Headers + :param dict form_content: Form content + """ + request = self._request('HEAD', url, params, headers, content, form_content) + return request + + def patch(self, url, params=None, headers=None, content=None, form_content=None): + # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest + """Create a PATCH request object. + + :param str url: The request URL. + :param dict params: Request URL parameters. + :param dict headers: Headers + :param dict form_content: Form content + """ + request = self._request('PATCH', url, params, headers, content, form_content) + return request + + def delete(self, url, params=None, headers=None, content=None, form_content=None): + # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest + """Create a DELETE request object. + + :param str url: The request URL. + :param dict params: Request URL parameters. + :param dict headers: Headers + :param dict form_content: Form content + """ + request = self._request('DELETE', url, params, headers, content, form_content) + return request + + def merge(self, url, params=None, headers=None, content=None, form_content=None): + # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest + """Create a MERGE request object. + + :param str url: The request URL. + :param dict params: Request URL parameters. + :param dict headers: Headers + :param dict form_content: Form content + """ + request = self._request('MERGE', url, params, headers, content, form_content) + return request + + +class ServiceClient(_ServiceClientCore): + """REST Service Client. + Maintains client pipeline and handles all requests and responses. + + :param creds: Deprecated, will be removed in next major version. Creds are now read from config.credentials. + :param Configuration config: Service configuration. + """ + + def __init__(self, creds, config): + # type: (Any, Configuration) -> None + super(ServiceClient, self).__init__(config) + + # If not Autorest, check if credentials comes from here and not config + if creds and config.credentials is None: + warnings.warn("Creds parameter is deprecated. Set config.credentials instead.", + DeprecationWarning) + config.credentials = creds + + self.config.pipeline = self._create_default_pipeline() + + def _create_default_pipeline(self): + # type: () -> Pipeline[ClientRequest, RequestsClientResponse] + creds = self.config.credentials + + policies = [ + self.config.user_agent_policy, # UserAgent policy + RequestsPatchSession(), # Support deprecated operation config at the session level + self.config.http_logger_policy # HTTP request/response log + ] # type: List[Union[HTTPPolicy, SansIOHTTPPolicy]] + if creds: + if isinstance(creds, (HTTPPolicy, SansIOHTTPPolicy)): + policies.insert(1, creds) + else: + # Assume this is the old credentials class, and then requests. Wrap it. 
+ policies.insert(1, RequestsCredentialsPolicy(creds)) # Set credentials for requests based session + + return Pipeline( + policies, + PipelineRequestsHTTPSender(RequestsHTTPSender(self.config)) # Send HTTP request using requests + ) + + def __enter__(self): + # type: () -> ServiceClient + self.config.keep_alive = True + self.config.pipeline.__enter__() + return self + + def __exit__(self, *exc_details): + self.config.pipeline.__exit__(*exc_details) + self.config.keep_alive = False + + def close(self): + # type: () -> None + """Close the pipeline if keep_alive is True. + """ + self.config.pipeline.__exit__() # type: ignore + + def send_formdata(self, request, headers=None, content=None, **config): + """Send data as a multipart form-data request. + We only deal with file-like objects or strings at this point. + The requests is not yet streamed. + + This method is deprecated, and shouldn't be used anymore. + + :param ClientRequest request: The request object to be sent. + :param dict headers: Any headers to add to the request. + :param dict content: Dictionary of the fields of the formdata. + :param config: Any specific config overrides. + """ + request.headers = headers + request.add_formdata(content) + return self.send(request, **config) + + def send(self, request, headers=None, content=None, **kwargs): + """Prepare and send request object according to configuration. + + :param ClientRequest request: The request object to be sent. + :param dict headers: Any headers to add to the request. + :param content: Any body data to add to the request. + :param config: Any specific config overrides + """ + # "content" and "headers" are deprecated, only old SDK + if headers: + request.headers.update(headers) + if not request.files and request.data is None and content is not None: + request.add_content(content) + # End of deprecation + + response = None + kwargs.setdefault('stream', True) + try: + pipeline_response = self.config.pipeline.run(request, **kwargs) + # There is too much thing that expects this method to return a "requests.Response" + # to break it in a compatible release. + # Also, to be pragmatic in the "sync" world "requests" rules anyway. + # However, attach the Universal HTTP response + # to get the streaming generator. + response = pipeline_response.http_response.internal_response + response._universal_http_response = pipeline_response.http_response + response.context = pipeline_response.context + return response + finally: + self._close_local_session_if_necessary(response, kwargs['stream']) + + def _close_local_session_if_necessary(self, response, stream): + # Here, it's a local session, I might close it. + if not self.config.keep_alive and (not response or not stream): + self.config.pipeline._sender.driver.session.close() + + def stream_download(self, data, callback): + # type: (Union[requests.Response, ClientResponse], Callable) -> Iterator[bytes] + """Generator for streaming request body data. + + :param data: A response object to be streamed. + :param callback: Custom callback for monitoring progress. 
+ """ + block = self.config.connection.data_block_size + try: + # Assume this is ClientResponse, which it should be if backward compat was not important + return cast(ClientResponse, data).stream_download(block, callback) + except AttributeError: + try: + # Assume this is the patched requests.Response from "send" + return data._universal_http_response.stream_download(block, callback) # type: ignore + except AttributeError: + # Assume this is a raw requests.Response + from .universal_http.requests import RequestsClientResponse + response = RequestsClientResponse(None, data) + return response.stream_download(block, callback) + + def add_header(self, header, value): + # type: (str, str) -> None + """Add a persistent header - this header will be applied to all + requests sent during the current client session. + + .. deprecated:: 0.5.0 + Use config.headers instead + + :param str header: The header name. + :param str value: The header value. + """ + warnings.warn("Private attribute _client.add_header is deprecated. Use config.headers instead.", + DeprecationWarning) + self.config.headers[header] = value diff --git a/.venv/lib/python3.12/site-packages/msrest/universal_http/__init__.py b/.venv/lib/python3.12/site-packages/msrest/universal_http/__init__.py new file mode 100644 index 00000000..60927559 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/universal_http/__init__.py @@ -0,0 +1,460 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+# +# -------------------------------------------------------------------------- +from __future__ import absolute_import # we have a "requests" module that conflicts with "requests" on Py2.7 +import abc +try: + import configparser + from configparser import NoOptionError +except ImportError: + import ConfigParser as configparser # type: ignore + from ConfigParser import NoOptionError # type: ignore +import json +import logging +import os.path +try: + from urlparse import urlparse +except ImportError: + from urllib.parse import urlparse +import xml.etree.ElementTree as ET + +from typing import TYPE_CHECKING, Generic, TypeVar, cast, IO, List, Union, Any, Mapping, Dict, Optional, Tuple, Callable, Iterator, MutableMapping # pylint: disable=unused-import + +HTTPResponseType = TypeVar("HTTPResponseType", bound='HTTPClientResponse') + +# This file is NOT using any "requests" HTTP implementation +# However, the CaseInsensitiveDict is handy. +# If one day we reach the point where "requests" can be skip totally, +# might provide our own implementation +from requests.structures import CaseInsensitiveDict + +from ..exceptions import ClientRequestError, raise_with_traceback + +if TYPE_CHECKING: + from ..serialization import Model # pylint: disable=unused-import + + +_LOGGER = logging.getLogger(__name__) + +try: + ABC = abc.ABC +except AttributeError: # Python 2.7, abc exists, but not ABC + ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()}) # type: ignore + +try: + from contextlib import AbstractContextManager # type: ignore +except ImportError: # Python <= 3.5 + class AbstractContextManager(object): # type: ignore + def __enter__(self): + """Return `self` upon entering the runtime context.""" + return self + + @abc.abstractmethod + def __exit__(self, exc_type, exc_value, traceback): + """Raise any exception triggered within the runtime context.""" + return None + + +class HTTPSender(AbstractContextManager, ABC): + """An http sender ABC. + """ + + @abc.abstractmethod + def send(self, request, **config): + # type: (ClientRequest, Any) -> ClientResponse + """Send the request using this HTTP sender. + """ + pass + + +class HTTPSenderConfiguration(object): + """HTTP sender configuration. + + This is composed of generic HTTP configuration, and could be use as a common + HTTP configuration format. + + :param str filepath: Path to existing config file (optional). + """ + + def __init__(self, filepath=None): + # Communication configuration + self.connection = ClientConnection() + + # Headers (sent with every requests) + self.headers = {} # type: Dict[str, str] + + # ProxyConfiguration + self.proxies = ClientProxies() + + # Redirect configuration + self.redirect_policy = ClientRedirectPolicy() + + self._config = configparser.ConfigParser() + self._config.optionxform = str # type: ignore + + if filepath: + self.load(filepath) + + def _clear_config(self): + # type: () -> None + """Clearout config object in memory.""" + for section in self._config.sections(): + self._config.remove_section(section) + + def save(self, filepath): + # type: (str) -> None + """Save current configuration to file. + + :param str filepath: Path to file where settings will be saved. + :raises: ValueError if supplied filepath cannot be written to. 
+ """ + sections = [ + "Connection", + "Proxies", + "RedirectPolicy"] + for section in sections: + self._config.add_section(section) + + self._config.set("Connection", "timeout", self.connection.timeout) + self._config.set("Connection", "verify", self.connection.verify) + self._config.set("Connection", "cert", self.connection.cert) + + self._config.set("Proxies", "proxies", self.proxies.proxies) + self._config.set("Proxies", "env_settings", + self.proxies.use_env_settings) + + self._config.set("RedirectPolicy", "allow", self.redirect_policy.allow) + self._config.set("RedirectPolicy", "max_redirects", + self.redirect_policy.max_redirects) + + try: + with open(filepath, 'w') as configfile: + self._config.write(configfile) + except (KeyError, EnvironmentError): + error = "Supplied config filepath invalid." + raise_with_traceback(ValueError, error) + finally: + self._clear_config() + + def load(self, filepath): + # type: (str) -> None + """Load configuration from existing file. + + :param str filepath: Path to existing config file. + :raises: ValueError if supplied config file is invalid. + """ + try: + self._config.read(filepath) + import ast + self.connection.timeout = \ + self._config.getint("Connection", "timeout") + self.connection.verify = \ + self._config.getboolean("Connection", "verify") + self.connection.cert = \ + self._config.get("Connection", "cert") + + self.proxies.proxies = \ + ast.literal_eval(self._config.get("Proxies", "proxies")) + self.proxies.use_env_settings = \ + self._config.getboolean("Proxies", "env_settings") + + self.redirect_policy.allow = \ + self._config.getboolean("RedirectPolicy", "allow") + self.redirect_policy.max_redirects = \ + self._config.getint("RedirectPolicy", "max_redirects") + + except (ValueError, EnvironmentError, NoOptionError): + error = "Supplied config file incompatible." + raise_with_traceback(ValueError, error) + finally: + self._clear_config() + + +class ClientRequest(object): + """Represents a HTTP request. + + URL can be given without query parameters, to be added later using "format_parameters". + + Instance can be created without data, to be added later using "add_content" + + Instance can be created without files, to be added later using "add_formdata" + + :param str method: HTTP method (GET, HEAD, etc.) + :param str url: At least complete scheme/host/path + :param dict[str,str] headers: HTTP headers + :param files: Files list. + :param data: Body to be sent. + :type data: bytes or str. + """ + def __init__(self, method, url, headers=None, files=None, data=None): + # type: (str, str, Mapping[str, str], Any, Any) -> None + self.method = method + self.url = url + self.headers = CaseInsensitiveDict(headers) + self.files = files + self.data = data + + def __repr__(self): + return '<ClientRequest [%s]>' % (self.method) + + @property + def body(self): + """Alias to data.""" + return self.data + + @body.setter + def body(self, value): + self.data = value + + def format_parameters(self, params): + # type: (Dict[str, str]) -> None + """Format parameters into a valid query string. + It's assumed all parameters have already been quoted as + valid URL strings. + + :param dict params: A dictionary of parameters. + """ + query = urlparse(self.url).query + if query: + self.url = self.url.partition('?')[0] + existing_params = { + p[0]: p[-1] + for p in [p.partition('=') for p in query.split('&')] + } + params.update(existing_params) + query_params = ["{}={}".format(k, v) for k, v in params.items()] + query = '?' 
+ '&'.join(query_params) + self.url = self.url + query + + def add_content(self, data): + # type: (Optional[Union[Dict[str, Any], ET.Element]]) -> None + """Add a body to the request. + + :param data: Request body data, can be a json serializable + object (e.g. dictionary) or a generator (e.g. file data). + """ + if data is None: + return + + if isinstance(data, ET.Element): + bytes_data = ET.tostring(data, encoding="utf8") + self.headers['Content-Length'] = str(len(bytes_data)) + self.data = bytes_data + return + + # By default, assume JSON + try: + self.data = json.dumps(data) + self.headers['Content-Length'] = str(len(self.data)) + except TypeError: + self.data = data + + @staticmethod + def _format_data(data): + # type: (Union[str, IO]) -> Union[Tuple[None, str], Tuple[Optional[str], IO, str]] + """Format field data according to whether it is a stream or + a string for a form-data request. + + :param data: The request field data. + :type data: str or file-like object. + """ + if hasattr(data, 'read'): + data = cast(IO, data) + data_name = None + try: + if data.name[0] != '<' and data.name[-1] != '>': + data_name = os.path.basename(data.name) + except (AttributeError, TypeError): + pass + return (data_name, data, "application/octet-stream") + return (None, cast(str, data)) + + def add_formdata(self, content=None): + # type: (Optional[Dict[str, str]]) -> None + """Add data as a multipart form-data request to the request. + + We only deal with file-like objects or strings at this point. + The requests is not yet streamed. + + :param dict headers: Any headers to add to the request. + :param dict content: Dictionary of the fields of the formdata. + """ + if content is None: + content = {} + content_type = self.headers.pop('Content-Type', None) if self.headers else None + + if content_type and content_type.lower() == 'application/x-www-form-urlencoded': + # Do NOT use "add_content" that assumes input is JSON + self.data = {f: d for f, d in content.items() if d is not None} + else: # Assume "multipart/form-data" + self.files = {f: self._format_data(d) for f, d in content.items() if d is not None} + +class HTTPClientResponse(object): + """Represent a HTTP response. + + No body is defined here on purpose, since async pipeline + will provide async ways to access the body + + You have two different types of body: + - Full in-memory using "body" as bytes + """ + def __init__(self, request, internal_response): + # type: (ClientRequest, Any) -> None + self.request = request + self.internal_response = internal_response + self.status_code = None # type: Optional[int] + self.headers = {} # type: MutableMapping[str, str] + self.reason = None # type: Optional[str] + + def body(self): + # type: () -> bytes + """Return the whole body as bytes in memory. + """ + pass + + def text(self, encoding=None): + # type: (str) -> str + """Return the whole body as a string. + + :param str encoding: The encoding to apply. If None, use "utf-8-sig". + Implementation can be smarter if they want (using headers). + """ + return self.body().decode(encoding or "utf-8-sig") + + def raise_for_status(self): + """Raise for status. Should be overridden, but basic implementation provided. + """ + if self.status_code >= 400: + raise ClientRequestError("Received status code {}".format(self.status_code)) + + +class ClientResponse(HTTPClientResponse): + + def stream_download(self, chunk_size=None, callback=None): + # type: (Optional[int], Optional[Callable]) -> Iterator[bytes] + """Generator for streaming request body data. 
+ + Should be implemented by sub-classes if streaming download + is supported. + + :param callback: Custom callback for monitoring progress. + :param int chunk_size: + """ + pass + + +class ClientRedirectPolicy(object): + """Redirect configuration settings. + """ + + def __init__(self): + self.allow = True + self.max_redirects = 30 + + def __bool__(self): + # type: () -> bool + """Whether redirects are allowed.""" + return self.allow + + def __call__(self): + # type: () -> int + """Return configuration to be applied to connection.""" + debug = "Configuring redirects: allow=%r, max=%r" + _LOGGER.debug(debug, self.allow, self.max_redirects) + return self.max_redirects + + +class ClientProxies(object): + """Proxy configuration settings. + Proxies can also be configured using HTTP_PROXY and HTTPS_PROXY + environment variables, in which case set use_env_settings to True. + """ + + def __init__(self): + self.proxies = {} + self.use_env_settings = True + + def __call__(self): + # type: () -> Dict[str, str] + """Return configuration to be applied to connection.""" + proxy_string = "\n".join( + [" {}: {}".format(k, v) for k, v in self.proxies.items()]) + + _LOGGER.debug("Configuring proxies: %r", proxy_string) + debug = "Evaluate proxies against ENV settings: %r" + _LOGGER.debug(debug, self.use_env_settings) + return self.proxies + + def add(self, protocol, proxy_url): + # type: (str, str) -> None + """Add proxy. + + :param str protocol: Protocol for which proxy is to be applied. Can + be 'http', 'https', etc. Can also include host. + :param str proxy_url: The proxy URL. Where basic auth is required, + use the format: http://user:password@host + """ + self.proxies[protocol] = proxy_url + + +class ClientConnection(object): + """Request connection configuration settings. + """ + + def __init__(self): + self.timeout = 100 + self.verify = True + self.cert = None + self.data_block_size = 4096 + + def __call__(self): + # type: () -> Dict[str, Union[str, int]] + """Return configuration to be applied to connection.""" + debug = "Configuring request: timeout=%r, verify=%r, cert=%r" + _LOGGER.debug(debug, self.timeout, self.verify, self.cert) + return {'timeout': self.timeout, + 'verify': self.verify, + 'cert': self.cert} + + +__all__ = [ + 'ClientRequest', + 'ClientResponse', + 'HTTPSender', + # Generic HTTP configuration + 'HTTPSenderConfiguration', + 'ClientRedirectPolicy', + 'ClientProxies', + 'ClientConnection' +] + +try: + from .async_abc import AsyncHTTPSender, AsyncClientResponse # pylint: disable=unused-import + from .async_abc import __all__ as _async_all + __all__ += _async_all +except SyntaxError: # Python 2 + pass +except ImportError: # pyinstaller won't include Py3 files in Py2.7 mode + pass diff --git a/.venv/lib/python3.12/site-packages/msrest/universal_http/aiohttp.py b/.venv/lib/python3.12/site-packages/msrest/universal_http/aiohttp.py new file mode 100644 index 00000000..0be8f299 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/universal_http/aiohttp.py @@ -0,0 +1,101 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +from typing import Any, Callable, AsyncIterator, Optional + +import aiohttp +from multidict import CIMultiDict + +from . import AsyncHTTPSender, ClientRequest, AsyncClientResponse + +# Matching requests, because why not? +CONTENT_CHUNK_SIZE = 10 * 1024 + + +class AioHTTPSender(AsyncHTTPSender): + """AioHttp HTTP sender implementation. + """ + + def __init__(self, *, loop=None): + self._session = aiohttp.ClientSession(loop=loop) + + async def __aenter__(self): + await self._session.__aenter__() + return self + + async def __aexit__(self, *exc_details): # pylint: disable=arguments-differ + await self._session.__aexit__(*exc_details) + + async def send(self, request: ClientRequest, **config: Any) -> AsyncClientResponse: + """Send the request using this HTTP sender. + + Will pre-load the body into memory to be available with a sync method. + pass stream=True to avoid this behavior. + """ + result = await self._session.request( + request.method, + request.url, + **config + ) + response = AioHttpClientResponse(request, result) + if not config.get("stream", False): + await response.load_body() + return response + + +class AioHttpClientResponse(AsyncClientResponse): + def __init__(self, request: ClientRequest, aiohttp_response: aiohttp.ClientResponse) -> None: + super(AioHttpClientResponse, self).__init__(request, aiohttp_response) + # https://aiohttp.readthedocs.io/en/stable/client_reference.html#aiohttp.ClientResponse + self.status_code = aiohttp_response.status + self.headers = CIMultiDict(aiohttp_response.headers) + self.reason = aiohttp_response.reason + self._body = None + + def body(self) -> bytes: + """Return the whole body as bytes in memory. + """ + if not self._body: + raise ValueError("Body is not available. Call async method load_body, or do your call with stream=False.") + return self._body + + async def load_body(self) -> None: + """Load in memory the body, so it could be accessible from sync methods.""" + self._body = await self.internal_response.read() + + def raise_for_status(self): + self.internal_response.raise_for_status() + + def stream_download(self, chunk_size: Optional[int] = None, callback: Optional[Callable] = None) -> AsyncIterator[bytes]: + """Generator for streaming request body data. 
+ """ + chunk_size = chunk_size or CONTENT_CHUNK_SIZE + async def async_gen(resp): + while True: + chunk = await resp.content.read(chunk_size) + if not chunk: + break + callback(chunk, resp) + return async_gen(self.internal_response) diff --git a/.venv/lib/python3.12/site-packages/msrest/universal_http/async_abc.py b/.venv/lib/python3.12/site-packages/msrest/universal_http/async_abc.py new file mode 100644 index 00000000..1b7f4c7c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/universal_http/async_abc.py @@ -0,0 +1,90 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +import abc + +from typing import Any, List, Union, Callable, AsyncIterator, Optional + +try: + from contextlib import AbstractAsyncContextManager # type: ignore +except ImportError: # Python <= 3.7 + class AbstractAsyncContextManager(object): # type: ignore + async def __aenter__(self): + """Return `self` upon entering the runtime context.""" + return self + + @abc.abstractmethod + async def __aexit__(self, exc_type, exc_value, traceback): + """Raise any exception triggered within the runtime context.""" + return None + +from . import ClientRequest, HTTPClientResponse + + +class AsyncClientResponse(HTTPClientResponse): + + def stream_download(self, chunk_size: Optional[int] = None, callback: Optional[Callable] = None) -> AsyncIterator[bytes]: + """Generator for streaming request body data. + + Should be implemented by sub-classes if streaming download + is supported. + + :param callback: Custom callback for monitoring progress. + :param int chunk_size: + """ + pass + + +class AsyncHTTPSender(AbstractAsyncContextManager, abc.ABC): + """An http sender ABC. + """ + + @abc.abstractmethod + async def send(self, request: ClientRequest, **config: Any) -> AsyncClientResponse: + """Send the request using this HTTP sender. + """ + pass + + def build_context(self) -> Any: + """Allow the sender to build a context that will be passed + across the pipeline with the request. + + Return type has no constraints. Implementation is not + required and None by default. 
+ """ + return None + + def __enter__(self): + raise TypeError("Use 'async with' instead") + + def __exit__(self, exc_type, exc_val, exc_tb): + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + +__all__ = [ + 'AsyncHTTPSender', + 'AsyncClientResponse' +]
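A minimal usage sketch for the async sender ABC and the AioHTTPSender implementation above (illustrative only, not part of the patched sources). It assumes the package layout shown in this diff; the coroutine name and URL are placeholders. Note that the ABC rejects plain "with", so the sender has to be driven through "async with".

import asyncio

from msrest.universal_http import ClientRequest
from msrest.universal_http.aiohttp import AioHTTPSender


async def fetch_text(url):
    # ClientRequest(method, url) as defined in universal_http/__init__.py.
    request = ClientRequest("GET", url)
    # "async with" enters and closes the underlying aiohttp.ClientSession.
    async with AioHTTPSender() as sender:
        # Without stream=True, send() pre-loads the body so text() works synchronously.
        response = await sender.send(request)
        response.raise_for_status()
        return response.text()

# asyncio.run(fetch_text("https://example.org"))  # placeholder URL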
\ No newline at end of file diff --git a/.venv/lib/python3.12/site-packages/msrest/universal_http/async_requests.py b/.venv/lib/python3.12/site-packages/msrest/universal_http/async_requests.py new file mode 100644 index 00000000..40a3a352 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/universal_http/async_requests.py @@ -0,0 +1,240 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +import asyncio +from collections.abc import AsyncIterator +import functools +import logging +from typing import Any, Callable, Optional, AsyncIterator as AsyncIteratorType + +from oauthlib import oauth2 +import requests +from requests.models import CONTENT_CHUNK_SIZE + +from ..exceptions import ( + TokenExpiredError, + ClientRequestError, + raise_with_traceback) +from . import AsyncHTTPSender, ClientRequest, AsyncClientResponse +from .requests import ( + BasicRequestsHTTPSender, + RequestsHTTPSender, + HTTPRequestsClientResponse +) + + +_LOGGER = logging.getLogger(__name__) + + +class AsyncBasicRequestsHTTPSender(BasicRequestsHTTPSender, AsyncHTTPSender): # type: ignore + + async def __aenter__(self): + return super(AsyncBasicRequestsHTTPSender, self).__enter__() + + async def __aexit__(self, *exc_details): # pylint: disable=arguments-differ + return super(AsyncBasicRequestsHTTPSender, self).__exit__() + + async def send(self, request: ClientRequest, **kwargs: Any) -> AsyncClientResponse: # type: ignore + """Send the request using this HTTP sender. + """ + # It's not recommended to provide its own session, and is mostly + # to enable some legacy code to plug correctly + session = kwargs.pop('session', self.session) + + loop = kwargs.get("loop", asyncio.get_event_loop()) + future = loop.run_in_executor( + None, + functools.partial( + session.request, + request.method, + request.url, + **kwargs + ) + ) + try: + return AsyncRequestsClientResponse( + request, + await future + ) + except requests.RequestException as err: + msg = "Error occurred in request." 
+ raise_with_traceback(ClientRequestError, msg, err) + +class AsyncRequestsHTTPSender(AsyncBasicRequestsHTTPSender, RequestsHTTPSender): # type: ignore + + async def send(self, request: ClientRequest, **kwargs: Any) -> AsyncClientResponse: # type: ignore + """Send the request using this HTTP sender. + """ + requests_kwargs = self._configure_send(request, **kwargs) + return await super(AsyncRequestsHTTPSender, self).send(request, **requests_kwargs) + + +class _MsrestStopIteration(Exception): + pass + +def _msrest_next(iterator): + """"To avoid: + TypeError: StopIteration interacts badly with generators and cannot be raised into a Future + """ + try: + return next(iterator) + except StopIteration: + raise _MsrestStopIteration() + +class StreamDownloadGenerator(AsyncIterator): + + def __init__(self, response: requests.Response, user_callback: Optional[Callable] = None, block: Optional[int] = None) -> None: + self.response = response + self.block = block or CONTENT_CHUNK_SIZE + self.user_callback = user_callback + self.iter_content_func = self.response.iter_content(self.block) + + async def __anext__(self): + loop = asyncio.get_event_loop() + try: + chunk = await loop.run_in_executor( + None, + _msrest_next, + self.iter_content_func, + ) + if not chunk: + raise _MsrestStopIteration() + if self.user_callback and callable(self.user_callback): + self.user_callback(chunk, self.response) + return chunk + except _MsrestStopIteration: + self.response.close() + raise StopAsyncIteration() + except Exception as err: + _LOGGER.warning("Unable to stream download: %s", err) + self.response.close() + raise + +class AsyncRequestsClientResponse(AsyncClientResponse, HTTPRequestsClientResponse): + + def stream_download(self, chunk_size: Optional[int] = None, callback: Optional[Callable] = None) -> AsyncIteratorType[bytes]: + """Generator for streaming request body data. + + :param callback: Custom callback for monitoring progress. + :param int chunk_size: + """ + return StreamDownloadGenerator( + self.internal_response, + callback, + chunk_size + ) + + +# Trio support +try: + import trio + + class TrioStreamDownloadGenerator(AsyncIterator): + + def __init__(self, response: requests.Response, user_callback: Optional[Callable] = None, block: Optional[int] = None) -> None: + self.response = response + self.block = block or CONTENT_CHUNK_SIZE + self.user_callback = user_callback + self.iter_content_func = self.response.iter_content(self.block) + + async def __anext__(self): + try: + chunk = await trio.to_thread.run_sync( + _msrest_next, + self.iter_content_func, + ) + if not chunk: + raise _MsrestStopIteration() + if self.user_callback and callable(self.user_callback): + self.user_callback(chunk, self.response) + return chunk + except _MsrestStopIteration: + self.response.close() + raise StopAsyncIteration() + except Exception as err: + _LOGGER.warning("Unable to stream download: %s", err) + self.response.close() + raise + + class TrioAsyncRequestsClientResponse(AsyncClientResponse, HTTPRequestsClientResponse): + + def stream_download(self, chunk_size: Optional[int] = None, callback: Optional[Callable] = None) -> AsyncIteratorType[bytes]: + """Generator for streaming request body data. + + :param callback: Custom callback for monitoring progress. 
+ :param int chunk_size: + """ + return TrioStreamDownloadGenerator( + self.internal_response, + callback, + chunk_size + ) + + + class AsyncTrioBasicRequestsHTTPSender(BasicRequestsHTTPSender, AsyncHTTPSender): # type: ignore + + async def __aenter__(self): + return super(AsyncTrioBasicRequestsHTTPSender, self).__enter__() + + async def __aexit__(self, *exc_details): # pylint: disable=arguments-differ + return super(AsyncTrioBasicRequestsHTTPSender, self).__exit__() + + async def send(self, request: ClientRequest, **kwargs: Any) -> AsyncClientResponse: # type: ignore + """Send the request using this HTTP sender. + """ + # It's not recommended to provide its own session, and is mostly + # to enable some legacy code to plug correctly + session = kwargs.pop('session', self.session) + + trio_limiter = kwargs.get("trio_limiter", None) + future = trio.to_thread.run_sync( + functools.partial( + session.request, + request.method, + request.url, + **kwargs + ), + limiter=trio_limiter + ) + try: + return TrioAsyncRequestsClientResponse( + request, + await future + ) + except requests.RequestException as err: + msg = "Error occurred in request." + raise_with_traceback(ClientRequestError, msg, err) + + class AsyncTrioRequestsHTTPSender(AsyncTrioBasicRequestsHTTPSender, RequestsHTTPSender): # type: ignore + + async def send(self, request: ClientRequest, **kwargs: Any) -> AsyncClientResponse: # type: ignore + """Send the request using this HTTP sender. + """ + requests_kwargs = self._configure_send(request, **kwargs) + return await super(AsyncTrioRequestsHTTPSender, self).send(request, **requests_kwargs) + +except ImportError: + # trio not installed + pass
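A usage sketch for the requests-backed asyncio sender defined above (illustrative only, not part of the patched sources); the trio variants follow the same shape. The URL and output path are placeholders.

import asyncio

from msrest.universal_http import ClientRequest
from msrest.universal_http.async_requests import AsyncBasicRequestsHTTPSender


async def download(url, path):
    request = ClientRequest("GET", url)
    async with AsyncBasicRequestsHTTPSender() as sender:
        # stream=True keeps the requests.Response open so stream_download()
        # can iterate it chunk by chunk via the executor-backed generator above.
        response = await sender.send(request, stream=True)
        response.raise_for_status()
        with open(path, "wb") as handle:
            async for chunk in response.stream_download():
                handle.write(chunk)

# asyncio.run(download("https://example.org/archive.zip", "archive.zip"))  # placeholders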
\ No newline at end of file diff --git a/.venv/lib/python3.12/site-packages/msrest/universal_http/requests.py b/.venv/lib/python3.12/site-packages/msrest/universal_http/requests.py new file mode 100644 index 00000000..e5e04632 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/universal_http/requests.py @@ -0,0 +1,464 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- +""" +This module is the requests implementation of Pipeline ABC +""" +from __future__ import absolute_import # we have a "requests" module that conflicts with "requests" on Py2.7 +import contextlib +import logging +import threading +from typing import TYPE_CHECKING, List, Callable, Iterator, Any, Union, Dict, Optional # pylint: disable=unused-import +import warnings + +try: + from configparser import NoOptionError +except ImportError: + from ConfigParser import NoOptionError # type: ignore + +from oauthlib import oauth2 +import requests +from requests.models import CONTENT_CHUNK_SIZE + +from urllib3 import Retry # Needs requests 2.16 at least to be safe + +from ..exceptions import ( + TokenExpiredError, + ClientRequestError, + raise_with_traceback) +from . import HTTPSender, HTTPClientResponse, ClientResponse, HTTPSenderConfiguration + +if TYPE_CHECKING: + from . import ClientRequest # pylint: disable=unused-import + + +_LOGGER = logging.getLogger(__name__) + + + +class HTTPRequestsClientResponse(HTTPClientResponse): + def __init__(self, request, requests_response): + super(HTTPRequestsClientResponse, self).__init__(request, requests_response) + self.status_code = requests_response.status_code + self.headers = requests_response.headers + self.reason = requests_response.reason + + def body(self): + return self.internal_response.content + + def text(self, encoding=None): + if encoding: + self.internal_response.encoding = encoding + return self.internal_response.text + + def raise_for_status(self): + self.internal_response.raise_for_status() + +class RequestsClientResponse(HTTPRequestsClientResponse, ClientResponse): + + def stream_download(self, chunk_size=None, callback=None): + # type: (Optional[int], Optional[Callable]) -> Iterator[bytes] + """Generator for streaming request body data. + + :param callback: Custom callback for monitoring progress. 
+ :param int chunk_size: + """ + chunk_size = chunk_size or CONTENT_CHUNK_SIZE + with contextlib.closing(self.internal_response) as response: + # https://github.com/PyCQA/pylint/issues/1437 + for chunk in response.iter_content(chunk_size): # pylint: disable=no-member + if not chunk: + break + if callback and callable(callback): + callback(chunk, response=response) + yield chunk + + +class BasicRequestsHTTPSender(HTTPSender): + """Implements a basic requests HTTP sender. + + Since requests team recommends to use one session per requests, you should + not consider this class as thread-safe, since it will use one Session + per instance. + + In this simple implementation: + - You provide the configured session if you want to, or a basic session is created. + - All kwargs received by "send" are sent to session.request directly + """ + + def __init__(self, session=None): + # type: (Optional[requests.Session]) -> None + self.session = session or requests.Session() + + def __enter__(self): + # type: () -> BasicRequestsHTTPSender + return self + + def __exit__(self, *exc_details): # pylint: disable=arguments-differ + self.close() + + def close(self): + self.session.close() + + def send(self, request, **kwargs): + # type: (ClientRequest, Any) -> ClientResponse + """Send request object according to configuration. + + Allowed kwargs are: + - session : will override the driver session and use yours. Should NOT be done unless really required. + - anything else is sent straight to requests. + + :param ClientRequest request: The request object to be sent. + """ + # It's not recommended to provide its own session, and is mostly + # to enable some legacy code to plug correctly + session = kwargs.pop('session', self.session) + try: + response = session.request( + request.method, + request.url, + **kwargs) + except requests.RequestException as err: + msg = "Error occurred in request." + raise_with_traceback(ClientRequestError, msg, err) + + return RequestsClientResponse(request, response) + + +def _patch_redirect(session): + # type: (requests.Session) -> None + """Whether redirect policy should be applied based on status code. + + HTTP spec says that on 301/302 not HEAD/GET, should NOT redirect. + But requests does, to follow browser more than spec + https://github.com/requests/requests/blob/f6e13ccfc4b50dc458ee374e5dba347205b9a2da/requests/sessions.py#L305-L314 + + This patches "requests" to be more HTTP compliant. + + Note that this is super dangerous, since technically this is not public API. + """ + def enforce_http_spec(resp, request): + if resp.status_code in (301, 302) and \ + request.method not in ['GET', 'HEAD']: + return False + return True + + redirect_logic = session.resolve_redirects + + def wrapped_redirect(resp, req, **kwargs): + attempt = enforce_http_spec(resp, req) + return redirect_logic(resp, req, **kwargs) if attempt else [] + wrapped_redirect.is_msrest_patched = True # type: ignore + + session.resolve_redirects = wrapped_redirect # type: ignore + +class RequestsHTTPSender(BasicRequestsHTTPSender): + """A requests HTTP sender that can consume a msrest.Configuration object. 
+ + This instance will consume the following configuration attributes: + - connection + - proxies + - retry_policy + - redirect_policy + - enable_http_logger + - hooks + - session_configuration_callback + """ + + _protocols = ['http://', 'https://'] + + # Set of authorized kwargs at the operation level + _REQUESTS_KWARGS = [ + 'cookies', + 'verify', + 'timeout', + 'allow_redirects', + 'proxies', + 'verify', + 'cert' + ] + + def __init__(self, config=None): + # type: (Optional[RequestHTTPSenderConfiguration]) -> None + self._session_mapping = threading.local() + self.config = config or RequestHTTPSenderConfiguration() + super(RequestsHTTPSender, self).__init__() + + @property # type: ignore + def session(self): + try: + return self._session_mapping.session + except AttributeError: + self._session_mapping.session = requests.Session() + self._init_session(self._session_mapping.session) + return self._session_mapping.session + + @session.setter + def session(self, value): + self._init_session(value) + self._session_mapping.session = value + + def _init_session(self, session): + # type: (requests.Session) -> None + """Init session level configuration of requests. + + This is initialization I want to do once only on a session. + """ + _patch_redirect(session) + + # Change max_retries in current all installed adapters + max_retries = self.config.retry_policy() + for protocol in self._protocols: + session.adapters[protocol].max_retries = max_retries + + def _configure_send(self, request, **kwargs): + # type: (ClientRequest, Any) -> Dict[str, str] + """Configure the kwargs to use with requests. + + See "send" for kwargs details. + + :param ClientRequest request: The request object to be sent. + :returns: The requests.Session.request kwargs + :rtype: dict[str,str] + """ + requests_kwargs = {} # type: Any + session = kwargs.pop('session', self.session) + + # If custom session was not create here + if session is not self.session: + self._init_session(session) + + session.max_redirects = int(self.config.redirect_policy()) + session.trust_env = bool(self.config.proxies.use_env_settings) + + # Initialize requests_kwargs with "config" value + requests_kwargs.update(self.config.connection()) + requests_kwargs['allow_redirects'] = bool(self.config.redirect_policy) + requests_kwargs['headers'] = self.config.headers.copy() + + proxies = self.config.proxies() + if proxies: + requests_kwargs['proxies'] = proxies + + # Replace by operation level kwargs + # We allow some of them, since some like stream or json are controlled by msrest + for key in kwargs: + if key in self._REQUESTS_KWARGS: + requests_kwargs[key] = kwargs[key] + + # Hooks. Deprecated, should be a policy + def make_user_hook_cb(user_hook, session): + def user_hook_cb(r, *args, **kwargs): + kwargs.setdefault("msrest", {})['session'] = session + return user_hook(r, *args, **kwargs) + return user_hook_cb + + hooks = [] + for user_hook in self.config.hooks: + hooks.append(make_user_hook_cb(user_hook, self.session)) + + if hooks: + requests_kwargs['hooks'] = {'response': hooks} + + # Configuration callback. Deprecated, should be a policy + output_kwargs = self.config.session_configuration_callback( + session, + self.config, + kwargs, + **requests_kwargs + ) + if output_kwargs is not None: + requests_kwargs = output_kwargs + + # If custom session was not create here + if session is not self.session: + requests_kwargs['session'] = session + + ### Autorest forced kwargs now ### + + # If Autorest needs this response to be streamable. True for compat. 
+ requests_kwargs['stream'] = kwargs.get('stream', True) + + if request.files: + requests_kwargs['files'] = request.files + elif request.data: + requests_kwargs['data'] = request.data + requests_kwargs['headers'].update(request.headers) + + return requests_kwargs + + def send(self, request, **kwargs): + # type: (ClientRequest, Any) -> ClientResponse + """Send request object according to configuration. + + Available kwargs: + - session : will override the driver session and use yours. Should NOT be done unless really required. + - A subset of what requests.Session.request can receive: + + - cookies + - verify + - timeout + - allow_redirects + - proxies + - verify + - cert + + Everything else will be silently ignored. + + :param ClientRequest request: The request object to be sent. + """ + requests_kwargs = self._configure_send(request, **kwargs) + return super(RequestsHTTPSender, self).send(request, **requests_kwargs) + + +class ClientRetryPolicy(object): + """Retry configuration settings. + Container for retry policy object. + """ + + safe_codes = [i for i in range(500) if i != 408] + [501, 505] + + def __init__(self): + self.policy = Retry() + self.policy.total = 3 + self.policy.connect = 3 + self.policy.read = 3 + self.policy.backoff_factor = 0.8 + self.policy.BACKOFF_MAX = 90 + + retry_codes = [i for i in range(999) if i not in self.safe_codes] + self.policy.status_forcelist = retry_codes + self.policy.method_whitelist = ['HEAD', 'TRACE', 'GET', 'PUT', + 'OPTIONS', 'DELETE', 'POST', 'PATCH'] + + def __call__(self): + # type: () -> Retry + """Return configuration to be applied to connection.""" + debug = ("Configuring retry: max_retries=%r, " + "backoff_factor=%r, max_backoff=%r") + _LOGGER.debug( + debug, self.retries, self.backoff_factor, self.max_backoff) + return self.policy + + @property + def retries(self): + # type: () -> int + """Total number of allowed retries.""" + return self.policy.total + + @retries.setter + def retries(self, value): + # type: (int) -> None + self.policy.total = value + self.policy.connect = value + self.policy.read = value + + @property + def backoff_factor(self): + # type: () -> Union[int, float] + """Factor by which back-off delay is incrementally increased.""" + return self.policy.backoff_factor + + @backoff_factor.setter + def backoff_factor(self, value): + # type: (Union[int, float]) -> None + self.policy.backoff_factor = value + + @property + def max_backoff(self): + # type: () -> int + """Max retry back-off delay.""" + return self.policy.BACKOFF_MAX + + @max_backoff.setter + def max_backoff(self, value): + # type: (int) -> None + self.policy.BACKOFF_MAX = value + +def default_session_configuration_callback(session, global_config, local_config, **kwargs): # pylint: disable=unused-argument + # type: (requests.Session, RequestHTTPSenderConfiguration, Dict[str,str], str) -> Dict[str, str] + """Configuration callback if you need to change default session configuration. + + :param requests.Session session: The session. + :param Configuration global_config: The global configuration. + :param dict[str,str] local_config: The on-the-fly configuration passed on the call. + :param dict[str,str] kwargs: The current computed values for session.request method. + :return: Must return kwargs, to be passed to session.request. If None is return, initial kwargs will be used. + :rtype: dict[str,str] + """ + return kwargs + +class RequestHTTPSenderConfiguration(HTTPSenderConfiguration): + """Requests specific HTTP sender configuration. 
+ + :param str filepath: Path to existing config file (optional). + """ + + def __init__(self, filepath=None): + # type: (Optional[str]) -> None + + super(RequestHTTPSenderConfiguration, self).__init__() + + # Retry configuration + self.retry_policy = ClientRetryPolicy() + + # Requests hooks. Must respect requests hook callback signature + # Note that we will inject the following parameters: + # - kwargs['msrest']['session'] with the current session + self.hooks = [] # type: List[Callable[[requests.Response, str, str], None]] + + self.session_configuration_callback = default_session_configuration_callback + + if filepath: + self.load(filepath) + + def save(self, filepath): + """Save current configuration to file. + + :param str filepath: Path to file where settings will be saved. + :raises: ValueError if supplied filepath cannot be written to. + """ + self._config.add_section("RetryPolicy") + self._config.set("RetryPolicy", "retries", str(self.retry_policy.retries)) + self._config.set("RetryPolicy", "backoff_factor", + str(self.retry_policy.backoff_factor)) + self._config.set("RetryPolicy", "max_backoff", + str(self.retry_policy.max_backoff)) + super(RequestHTTPSenderConfiguration, self).save(filepath) + + def load(self, filepath): + try: + self.retry_policy.retries = \ + self._config.getint("RetryPolicy", "retries") + self.retry_policy.backoff_factor = \ + self._config.getfloat("RetryPolicy", "backoff_factor") + self.retry_policy.max_backoff = \ + self._config.getint("RetryPolicy", "max_backoff") + except (ValueError, EnvironmentError, NoOptionError): + error = "Supplied config file incompatible." + raise_with_traceback(ValueError, error) + finally: + self._clear_config() + super(RequestHTTPSenderConfiguration, self).load(filepath) diff --git a/.venv/lib/python3.12/site-packages/msrest/version.py b/.venv/lib/python3.12/site-packages/msrest/version.py new file mode 100644 index 00000000..27e090ab --- /dev/null +++ b/.venv/lib/python3.12/site-packages/msrest/version.py @@ -0,0 +1,28 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +#: version of this package. Use msrest.__version__ instead +msrest_version = "0.7.1" |
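A configuration sketch for the synchronous RequestsHTTPSender and RequestHTTPSenderConfiguration defined in requests.py above (illustrative only, not part of the patched sources). The URL and tuning values are placeholders.

from msrest.universal_http import ClientRequest
from msrest.universal_http.requests import (
    RequestHTTPSenderConfiguration,
    RequestsHTTPSender,
)

config = RequestHTTPSenderConfiguration()
config.connection.timeout = 30            # ClientConnection setting, in seconds
config.retry_policy.retries = 5           # feeds the urllib3 Retry object shown above
config.retry_policy.backoff_factor = 1.0
# config.proxies.add("https", "http://user:password@proxyhost:8080")  # optional, placeholder

request = ClientRequest("GET", "https://example.org")  # placeholder URL
with RequestsHTTPSender(config) as sender:
    # stream=False reads the body eagerly so the connection can be released/reused.
    response = sender.send(request, stream=False)
    response.raise_for_status()
    print(response.status_code, len(response.body()))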