about summary refs log tree commit diff
path: root/.venv/lib/python3.12/site-packages/boto3/s3
diff options
context:
space:
mode:
author S. Solomon Darnell 2025-03-28 21:52:21 -0500
committer S. Solomon Darnell 2025-03-28 21:52:21 -0500
commit 4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
tree ee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/boto3/s3
parent cc961e04ba734dd72309fb548a2f97d67d578813 (diff)
download gn-ai-master.tar.gz
two versions of R2R are here HEAD master
Diffstat (limited to '.venv/lib/python3.12/site-packages/boto3/s3')
-rw-r--r--.venv/lib/python3.12/site-packages/boto3/s3/__init__.py12
-rw-r--r--.venv/lib/python3.12/site-packages/boto3/s3/constants.py17
-rw-r--r--.venv/lib/python3.12/site-packages/boto3/s3/inject.py926
-rw-r--r--.venv/lib/python3.12/site-packages/boto3/s3/transfer.py438
4 files changed, 1393 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/boto3/s3/__init__.py b/.venv/lib/python3.12/site-packages/boto3/s3/__init__.py
new file mode 100644
index 00000000..6001b27b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/boto3/s3/__init__.py
@@ -0,0 +1,12 @@
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# https://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
diff --git a/.venv/lib/python3.12/site-packages/boto3/s3/constants.py b/.venv/lib/python3.12/site-packages/boto3/s3/constants.py
new file mode 100644
index 00000000..c7f691fc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/boto3/s3/constants.py
@@ -0,0 +1,17 @@
+# Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# https://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+
+# TransferConfig preferred_transfer_client settings
+# Valid values for TransferConfig.preferred_transfer_client: "classic" forces
+# the classic transfer client; "auto" lets boto3 resolve the client itself.
+CLASSIC_TRANSFER_CLIENT = "classic"
+AUTO_RESOLVE_TRANSFER_CLIENT = "auto"
diff --git a/.venv/lib/python3.12/site-packages/boto3/s3/inject.py b/.venv/lib/python3.12/site-packages/boto3/s3/inject.py
new file mode 100644
index 00000000..9c4dcb52
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/boto3/s3/inject.py
@@ -0,0 +1,926 @@
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# https://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import copy as python_copy
+from functools import partial
+
+from botocore.exceptions import ClientError
+
+from boto3 import utils
+from boto3.s3.transfer import (
+    ProgressCallbackInvoker,
+    S3Transfer,
+    TransferConfig,
+    create_transfer_manager,
+)
+
+try:
+    from botocore.context import with_current_context
+except ImportError:
+    # Older botocore versions do not provide botocore.context.  Fall back to
+    # a no-op decorator so the @with_current_context usages below still work.
+    from functools import wraps
+
+    def with_current_context(hook=None):
+        # No-op stand-in: ignores ``hook`` and returns the wrapped function
+        # unchanged (aside from @wraps metadata preservation).
+        def decorator(func):
+            @wraps(func)
+            def wrapper(*args, **kwargs):
+                return func(*args, **kwargs)
+
+            return wrapper
+
+        return decorator
+
+
+try:
+    from botocore.useragent import register_feature_id
+except ImportError:
+
+    # No-op fallback for botocore versions without user-agent feature IDs.
+    def register_feature_id(feature_id):
+        pass
+
+
+def inject_s3_transfer_methods(class_attributes, **kwargs):
+    """Attach the managed transfer methods to the S3 client class."""
+    utils.inject_attribute(class_attributes, 'upload_file', upload_file)
+    utils.inject_attribute(class_attributes, 'download_file', download_file)
+    utils.inject_attribute(class_attributes, 'copy', copy)
+    utils.inject_attribute(class_attributes, 'upload_fileobj', upload_fileobj)
+    utils.inject_attribute(
+        class_attributes, 'download_fileobj', download_fileobj
+    )
+
+
+def inject_bucket_methods(class_attributes, **kwargs):
+    """Attach load and the managed transfer methods to the Bucket resource."""
+    utils.inject_attribute(class_attributes, 'load', bucket_load)
+    utils.inject_attribute(class_attributes, 'upload_file', bucket_upload_file)
+    utils.inject_attribute(
+        class_attributes, 'download_file', bucket_download_file
+    )
+    utils.inject_attribute(class_attributes, 'copy', bucket_copy)
+    utils.inject_attribute(
+        class_attributes, 'upload_fileobj', bucket_upload_fileobj
+    )
+    utils.inject_attribute(
+        class_attributes, 'download_fileobj', bucket_download_fileobj
+    )
+
+
+def inject_object_methods(class_attributes, **kwargs):
+    """Attach the managed transfer methods to the Object resource."""
+    utils.inject_attribute(class_attributes, 'upload_file', object_upload_file)
+    utils.inject_attribute(
+        class_attributes, 'download_file', object_download_file
+    )
+    utils.inject_attribute(class_attributes, 'copy', object_copy)
+    utils.inject_attribute(
+        class_attributes, 'upload_fileobj', object_upload_fileobj
+    )
+    utils.inject_attribute(
+        class_attributes, 'download_fileobj', object_download_fileobj
+    )
+
+
+def inject_object_summary_methods(class_attributes, **kwargs):
+    """Attach the load method to the ObjectSummary resource."""
+    utils.inject_attribute(class_attributes, 'load', object_summary_load)
+
+
+def bucket_load(self, *args, **kwargs):
+    """
+    Calls s3.Client.list_buckets() to update the attributes of the Bucket
+    resource.
+    """
+    # The docstring above is phrased this way to match what the autogenerated
+    # docs produce.
+
+    # We can't actually get the bucket's attributes from a HeadBucket,
+    # so we need to use a ListBuckets and search for our bucket.
+    # However, we may fail if we lack permissions to ListBuckets
+    # or the bucket is in another account. In which case, creation_date
+    # will be None.
+    self.meta.data = {}
+    try:
+        response = self.meta.client.list_buckets()
+        for bucket_data in response['Buckets']:
+            if bucket_data['Name'] == self.name:
+                self.meta.data = bucket_data
+                break
+    except ClientError as e:
+        if not e.response.get('Error', {}).get('Code') == 'AccessDenied':
+            raise
+
+
+def object_summary_load(self, *args, **kwargs):
+    """
+    Calls s3.Client.head_object to update the attributes of the ObjectSummary
+    resource.
+    """
+    response = self.meta.client.head_object(
+        Bucket=self.bucket_name, Key=self.key
+    )
+    # head_object reports the object size as 'ContentLength'; the
+    # ObjectSummary resource exposes it as 'Size', so rename the key.
+    if 'ContentLength' in response:
+        response['Size'] = response.pop('ContentLength')
+    self.meta.data = response
+
+
+@with_current_context(partial(register_feature_id, 'S3_TRANSFER'))
+def upload_file(
+    self, Filename, Bucket, Key, ExtraArgs=None, Callback=None, Config=None
+):
+    """Upload a file to an S3 object.
+
+    Usage::
+
+        import boto3
+        s3 = boto3.client('s3')
+        s3.upload_file('/tmp/hello.txt', 'amzn-s3-demo-bucket', 'hello.txt')
+
+    Similar behavior as S3Transfer's upload_file() method, except that
+    argument names are capitalized. Detailed examples can be found at
+    :ref:`S3Transfer's Usage <ref_s3transfer_usage>`.
+
+    :type Filename: str
+    :param Filename: The path to the file to upload.
+
+    :type Bucket: str
+    :param Bucket: The name of the bucket to upload to.
+
+    :type Key: str
+    :param Key: The name of the key to upload to.
+
+    :type ExtraArgs: dict
+    :param ExtraArgs: Extra arguments that may be passed to the
+        client operation. For allowed upload arguments see
+        :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_UPLOAD_ARGS`.
+
+    :type Callback: function
+    :param Callback: A method which takes a number of bytes transferred to
+        be periodically called during the upload.
+
+    :type Config: boto3.s3.transfer.TransferConfig
+    :param Config: The transfer configuration to be used when performing the
+        transfer.
+    """
+    # S3Transfer is used as a context manager so its resources are released
+    # once the transfer completes (or fails).
+    with S3Transfer(self, Config) as transfer:
+        return transfer.upload_file(
+            filename=Filename,
+            bucket=Bucket,
+            key=Key,
+            extra_args=ExtraArgs,
+            callback=Callback,
+        )
+
+
+@with_current_context(partial(register_feature_id, 'S3_TRANSFER'))
+def download_file(
+    self, Bucket, Key, Filename, ExtraArgs=None, Callback=None, Config=None
+):
+    """Download an S3 object to a file.
+
+    Usage::
+
+        import boto3
+        s3 = boto3.client('s3')
+        s3.download_file('amzn-s3-demo-bucket', 'hello.txt', '/tmp/hello.txt')
+
+    Similar behavior as S3Transfer's download_file() method,
+    except that parameters are capitalized. Detailed examples can be found at
+    :ref:`S3Transfer's Usage <ref_s3transfer_usage>`.
+
+    :type Bucket: str
+    :param Bucket: The name of the bucket to download from.
+
+    :type Key: str
+    :param Key: The name of the key to download from.
+
+    :type Filename: str
+    :param Filename: The path to the file to download to.
+
+    :type ExtraArgs: dict
+    :param ExtraArgs: Extra arguments that may be passed to the
+        client operation. For allowed download arguments see
+        :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS`.
+
+    :type Callback: function
+    :param Callback: A method which takes a number of bytes transferred to
+        be periodically called during the download.
+
+    :type Config: boto3.s3.transfer.TransferConfig
+    :param Config: The transfer configuration to be used when performing the
+        transfer.
+    """
+    # S3Transfer is used as a context manager so its resources are released
+    # once the transfer completes (or fails).
+    with S3Transfer(self, Config) as transfer:
+        return transfer.download_file(
+            bucket=Bucket,
+            key=Key,
+            filename=Filename,
+            extra_args=ExtraArgs,
+            callback=Callback,
+        )
+
+
+def bucket_upload_file(
+    self, Filename, Key, ExtraArgs=None, Callback=None, Config=None
+):
+    """Upload a file to an S3 object.
+
+    Usage::
+
+        import boto3
+        s3 = boto3.resource('s3')
+        s3.Bucket('amzn-s3-demo-bucket').upload_file('/tmp/hello.txt', 'hello.txt')
+
+    Similar behavior as S3Transfer's upload_file() method,
+    except that parameters are capitalized. Detailed examples can be found at
+    :ref:`S3Transfer's Usage <ref_s3transfer_usage>`.
+
+    :type Filename: str
+    :param Filename: The path to the file to upload.
+
+    :type Key: str
+    :param Key: The name of the key to upload to.
+
+    :type ExtraArgs: dict
+    :param ExtraArgs: Extra arguments that may be passed to the
+        client operation. For allowed upload arguments see
+        :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_UPLOAD_ARGS`.
+
+    :type Callback: function
+    :param Callback: A method which takes a number of bytes transferred to
+        be periodically called during the upload.
+
+    :type Config: boto3.s3.transfer.TransferConfig
+    :param Config: The transfer configuration to be used when performing the
+        transfer.
+    """
+    # Delegate to the client-level upload_file, supplying this bucket's name.
+    return self.meta.client.upload_file(
+        Filename=Filename,
+        Bucket=self.name,
+        Key=Key,
+        ExtraArgs=ExtraArgs,
+        Callback=Callback,
+        Config=Config,
+    )
+
+
+def bucket_download_file(
+    self, Key, Filename, ExtraArgs=None, Callback=None, Config=None
+):
+    """Download an S3 object to a file.
+
+    Usage::
+
+        import boto3
+        s3 = boto3.resource('s3')
+        s3.Bucket('amzn-s3-demo-bucket').download_file('hello.txt', '/tmp/hello.txt')
+
+    Similar behavior as S3Transfer's download_file() method,
+    except that parameters are capitalized. Detailed examples can be found at
+    :ref:`S3Transfer's Usage <ref_s3transfer_usage>`.
+
+    :type Key: str
+    :param Key: The name of the key to download from.
+
+    :type Filename: str
+    :param Filename: The path to the file to download to.
+
+    :type ExtraArgs: dict
+    :param ExtraArgs: Extra arguments that may be passed to the
+        client operation. For allowed download arguments see
+        :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS`.
+
+    :type Callback: function
+    :param Callback: A method which takes a number of bytes transferred to
+        be periodically called during the download.
+
+    :type Config: boto3.s3.transfer.TransferConfig
+    :param Config: The transfer configuration to be used when performing the
+        transfer.
+    """
+    # Delegate to the client-level download_file, supplying this bucket's
+    # name.
+    return self.meta.client.download_file(
+        Bucket=self.name,
+        Key=Key,
+        Filename=Filename,
+        ExtraArgs=ExtraArgs,
+        Callback=Callback,
+        Config=Config,
+    )
+
+
+def object_upload_file(
+    self, Filename, ExtraArgs=None, Callback=None, Config=None
+):
+    """Upload a file to an S3 object.
+
+    Usage::
+
+        import boto3
+        s3 = boto3.resource('s3')
+        s3.Object('amzn-s3-demo-bucket', 'hello.txt').upload_file('/tmp/hello.txt')
+
+    Similar behavior as S3Transfer's upload_file() method,
+    except that parameters are capitalized. Detailed examples can be found at
+    :ref:`S3Transfer's Usage <ref_s3transfer_usage>`.
+
+    :type Filename: str
+    :param Filename: The path to the file to upload.
+
+    :type ExtraArgs: dict
+    :param ExtraArgs: Extra arguments that may be passed to the
+        client operation. For allowed upload arguments see
+        :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_UPLOAD_ARGS`.
+
+    :type Callback: function
+    :param Callback: A method which takes a number of bytes transferred to
+        be periodically called during the upload.
+
+    :type Config: boto3.s3.transfer.TransferConfig
+    :param Config: The transfer configuration to be used when performing the
+        transfer.
+    """
+    # Delegate to the client-level upload_file, supplying this object's
+    # bucket name and key.
+    return self.meta.client.upload_file(
+        Filename=Filename,
+        Bucket=self.bucket_name,
+        Key=self.key,
+        ExtraArgs=ExtraArgs,
+        Callback=Callback,
+        Config=Config,
+    )
+
+
+def object_download_file(
+    self, Filename, ExtraArgs=None, Callback=None, Config=None
+):
+    """Download an S3 object to a file.
+
+    Usage::
+
+        import boto3
+        s3 = boto3.resource('s3')
+        s3.Object('amzn-s3-demo-bucket', 'hello.txt').download_file('/tmp/hello.txt')
+
+    Similar behavior as S3Transfer's download_file() method,
+    except that parameters are capitalized. Detailed examples can be found at
+    :ref:`S3Transfer's Usage <ref_s3transfer_usage>`.
+
+    :type Filename: str
+    :param Filename: The path to the file to download to.
+
+    :type ExtraArgs: dict
+    :param ExtraArgs: Extra arguments that may be passed to the
+        client operation. For allowed download arguments see
+        :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS`.
+
+    :type Callback: function
+    :param Callback: A method which takes a number of bytes transferred to
+        be periodically called during the download.
+
+    :type Config: boto3.s3.transfer.TransferConfig
+    :param Config: The transfer configuration to be used when performing the
+        transfer.
+    """
+    # Delegate to the client-level download_file, supplying this object's
+    # bucket name and key.
+    return self.meta.client.download_file(
+        Bucket=self.bucket_name,
+        Key=self.key,
+        Filename=Filename,
+        ExtraArgs=ExtraArgs,
+        Callback=Callback,
+        Config=Config,
+    )
+
+
+@with_current_context(partial(register_feature_id, 'S3_TRANSFER'))
+def copy(
+    self,
+    CopySource,
+    Bucket,
+    Key,
+    ExtraArgs=None,
+    Callback=None,
+    SourceClient=None,
+    Config=None,
+):
+    """Copy an object from one S3 location to another.
+
+    This is a managed transfer which will perform a multipart copy in
+    multiple threads if necessary.
+
+    Usage::
+
+        import boto3
+        s3 = boto3.resource('s3')
+        copy_source = {
+            'Bucket': 'amzn-s3-demo-bucket1',
+            'Key': 'mykey'
+        }
+        s3.meta.client.copy(copy_source, 'amzn-s3-demo-bucket2', 'otherkey')
+
+    :type CopySource: dict
+    :param CopySource: The name of the source bucket, key name of the
+        source object, and optional version ID of the source object. The
+        dictionary format is:
+        ``{'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}``. Note
+        that the ``VersionId`` key is optional and may be omitted.
+
+    :type Bucket: str
+    :param Bucket: The name of the bucket to copy to
+
+    :type Key: str
+    :param Key: The name of the key to copy to
+
+    :type ExtraArgs: dict
+    :param ExtraArgs: Extra arguments that may be passed to the
+        client operation. For allowed download arguments see
+        :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS`.
+
+    :type Callback: function
+    :param Callback: A method which takes a number of bytes transferred to
+        be periodically called during the copy.
+
+    :type SourceClient: botocore or boto3 Client
+    :param SourceClient: The client to be used for operation that
+        may happen at the source object. For example, this client is
+        used for the head_object that determines the size of the copy.
+        If no client is provided, the current client is used as the client
+        for the source object.
+
+    :type Config: boto3.s3.transfer.TransferConfig
+    :param Config: The transfer configuration to be used when performing the
+        copy.
+    """
+    # Wrap the user's callback (if any) in a subscriber the transfer
+    # manager understands.
+    subscribers = None
+    if Callback is not None:
+        subscribers = [ProgressCallbackInvoker(Callback)]
+
+    config = Config
+    if config is None:
+        config = TransferConfig()
+
+    # copy is not supported in the CRT
+    # Work on a shallow copy so the caller's Config is not mutated; force
+    # the classic transfer client ("classic" is the same value as
+    # boto3.s3.constants.CLASSIC_TRANSFER_CLIENT).
+    new_config = python_copy.copy(config)
+    new_config.preferred_transfer_client = "classic"
+
+    with create_transfer_manager(self, new_config) as manager:
+        future = manager.copy(
+            copy_source=CopySource,
+            bucket=Bucket,
+            key=Key,
+            extra_args=ExtraArgs,
+            subscribers=subscribers,
+            source_client=SourceClient,
+        )
+        # Block until the managed copy finishes (raises on failure).
+        return future.result()
+
+
+def bucket_copy(
+    self,
+    CopySource,
+    Key,
+    ExtraArgs=None,
+    Callback=None,
+    SourceClient=None,
+    Config=None,
+):
+    """Copy an object from one S3 location to an object in this bucket.
+
+    This is a managed transfer which will perform a multipart copy in
+    multiple threads if necessary.
+
+    Usage::
+
+        import boto3
+        s3 = boto3.resource('s3')
+        copy_source = {
+            'Bucket': 'amzn-s3-demo-bucket1',
+            'Key': 'mykey'
+        }
+        bucket = s3.Bucket('amzn-s3-demo-bucket2')
+        bucket.copy(copy_source, 'otherkey')
+
+    :type CopySource: dict
+    :param CopySource: The name of the source bucket, key name of the
+        source object, and optional version ID of the source object. The
+        dictionary format is:
+        ``{'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}``. Note
+        that the ``VersionId`` key is optional and may be omitted.
+
+    :type Key: str
+    :param Key: The name of the key to copy to
+
+    :type ExtraArgs: dict
+    :param ExtraArgs: Extra arguments that may be passed to the
+        client operation. For allowed download arguments see
+        :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS`.
+
+    :type Callback: function
+    :param Callback: A method which takes a number of bytes transferred to
+        be periodically called during the copy.
+
+    :type SourceClient: botocore or boto3 Client
+    :param SourceClient: The client to be used for operation that
+        may happen at the source object. For example, this client is
+        used for the head_object that determines the size of the copy.
+        If no client is provided, the current client is used as the client
+        for the source object.
+
+    :type Config: boto3.s3.transfer.TransferConfig
+    :param Config: The transfer configuration to be used when performing the
+        copy.
+    """
+    # Delegate to the client-level copy, supplying this bucket's name.
+    return self.meta.client.copy(
+        CopySource=CopySource,
+        Bucket=self.name,
+        Key=Key,
+        ExtraArgs=ExtraArgs,
+        Callback=Callback,
+        SourceClient=SourceClient,
+        Config=Config,
+    )
+
+
+def object_copy(
+    self,
+    CopySource,
+    ExtraArgs=None,
+    Callback=None,
+    SourceClient=None,
+    Config=None,
+):
+    """Copy an object from one S3 location to this object.
+
+    This is a managed transfer which will perform a multipart copy in
+    multiple threads if necessary.
+
+    Usage::
+
+        import boto3
+        s3 = boto3.resource('s3')
+        copy_source = {
+            'Bucket': 'amzn-s3-demo-bucket1',
+            'Key': 'mykey'
+        }
+        bucket = s3.Bucket('amzn-s3-demo-bucket2')
+        obj = bucket.Object('otherkey')
+        obj.copy(copy_source)
+
+    :type CopySource: dict
+    :param CopySource: The name of the source bucket, key name of the
+        source object, and optional version ID of the source object. The
+        dictionary format is:
+        ``{'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}``. Note
+        that the ``VersionId`` key is optional and may be omitted.
+
+    :type ExtraArgs: dict
+    :param ExtraArgs: Extra arguments that may be passed to the
+        client operation. For allowed download arguments see
+        :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS`.
+
+    :type Callback: function
+    :param Callback: A method which takes a number of bytes transferred to
+        be periodically called during the copy.
+
+    :type SourceClient: botocore or boto3 Client
+    :param SourceClient: The client to be used for operation that
+        may happen at the source object. For example, this client is
+        used for the head_object that determines the size of the copy.
+        If no client is provided, the current client is used as the client
+        for the source object.
+
+    :type Config: boto3.s3.transfer.TransferConfig
+    :param Config: The transfer configuration to be used when performing the
+        copy.
+    """
+    # Delegate to the client-level copy, supplying this object's bucket name
+    # and key.
+    return self.meta.client.copy(
+        CopySource=CopySource,
+        Bucket=self.bucket_name,
+        Key=self.key,
+        ExtraArgs=ExtraArgs,
+        Callback=Callback,
+        SourceClient=SourceClient,
+        Config=Config,
+    )
+
+
+@with_current_context(partial(register_feature_id, 'S3_TRANSFER'))
+def upload_fileobj(
+    self, Fileobj, Bucket, Key, ExtraArgs=None, Callback=None, Config=None
+):
+    """Upload a file-like object to S3.
+
+    The file-like object must be in binary mode.
+
+    This is a managed transfer which will perform a multipart upload in
+    multiple threads if necessary.
+
+    Usage::
+
+        import boto3
+        s3 = boto3.client('s3')
+
+        with open('filename', 'rb') as data:
+            s3.upload_fileobj(data, 'amzn-s3-demo-bucket', 'mykey')
+
+    :type Fileobj: a file-like object
+    :param Fileobj: A file-like object to upload. At a minimum, it must
+        implement the `read` method, and must return bytes.
+
+    :type Bucket: str
+    :param Bucket: The name of the bucket to upload to.
+
+    :type Key: str
+    :param Key: The name of the key to upload to.
+
+    :type ExtraArgs: dict
+    :param ExtraArgs: Extra arguments that may be passed to the
+        client operation. For allowed upload arguments see
+        :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_UPLOAD_ARGS`.
+
+    :type Callback: function
+    :param Callback: A method which takes a number of bytes transferred to
+        be periodically called during the upload.
+
+    :type Config: boto3.s3.transfer.TransferConfig
+    :param Config: The transfer configuration to be used when performing the
+        upload.
+    """
+    # Duck-type validation: only require a ``read`` method, not a real file.
+    if not hasattr(Fileobj, 'read'):
+        raise ValueError('Fileobj must implement read')
+
+    # Wrap the user's callback (if any) in a subscriber the transfer
+    # manager understands.
+    subscribers = None
+    if Callback is not None:
+        subscribers = [ProgressCallbackInvoker(Callback)]
+
+    config = Config
+    if config is None:
+        config = TransferConfig()
+
+    with create_transfer_manager(self, config) as manager:
+        future = manager.upload(
+            fileobj=Fileobj,
+            bucket=Bucket,
+            key=Key,
+            extra_args=ExtraArgs,
+            subscribers=subscribers,
+        )
+        # Block until the managed upload finishes (raises on failure).
+        return future.result()
+
+
+def bucket_upload_fileobj(
+    self, Fileobj, Key, ExtraArgs=None, Callback=None, Config=None
+):
+    """Upload a file-like object to this bucket.
+
+    The file-like object must be in binary mode.
+
+    This is a managed transfer which will perform a multipart upload in
+    multiple threads if necessary.
+
+    Usage::
+
+        import boto3
+        s3 = boto3.resource('s3')
+        bucket = s3.Bucket('amzn-s3-demo-bucket')
+
+        with open('filename', 'rb') as data:
+            bucket.upload_fileobj(data, 'mykey')
+
+    :type Fileobj: a file-like object
+    :param Fileobj: A file-like object to upload. At a minimum, it must
+        implement the `read` method, and must return bytes.
+
+    :type Key: str
+    :param Key: The name of the key to upload to.
+
+    :type ExtraArgs: dict
+    :param ExtraArgs: Extra arguments that may be passed to the
+        client operation. For allowed upload arguments see
+        :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_UPLOAD_ARGS`.
+
+    :type Callback: function
+    :param Callback: A method which takes a number of bytes transferred to
+        be periodically called during the upload.
+
+    :type Config: boto3.s3.transfer.TransferConfig
+    :param Config: The transfer configuration to be used when performing the
+        upload.
+    """
+    # Delegate to the client-level upload_fileobj, supplying this bucket's
+    # name.
+    return self.meta.client.upload_fileobj(
+        Fileobj=Fileobj,
+        Bucket=self.name,
+        Key=Key,
+        ExtraArgs=ExtraArgs,
+        Callback=Callback,
+        Config=Config,
+    )
+
+
+def object_upload_fileobj(
+    self, Fileobj, ExtraArgs=None, Callback=None, Config=None
+):
+    """Upload a file-like object to this object.
+
+    The file-like object must be in binary mode.
+
+    This is a managed transfer which will perform a multipart upload in
+    multiple threads if necessary.
+
+    Usage::
+
+        import boto3
+        s3 = boto3.resource('s3')
+        bucket = s3.Bucket('amzn-s3-demo-bucket')
+        obj = bucket.Object('mykey')
+
+        with open('filename', 'rb') as data:
+            obj.upload_fileobj(data)
+
+    :type Fileobj: a file-like object
+    :param Fileobj: A file-like object to upload. At a minimum, it must
+        implement the `read` method, and must return bytes.
+
+    :type ExtraArgs: dict
+    :param ExtraArgs: Extra arguments that may be passed to the
+        client operation. For allowed upload arguments see
+        :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_UPLOAD_ARGS`.
+
+    :type Callback: function
+    :param Callback: A method which takes a number of bytes transferred to
+        be periodically called during the upload.
+
+    :type Config: boto3.s3.transfer.TransferConfig
+    :param Config: The transfer configuration to be used when performing the
+        upload.
+    """
+    # Delegate to the client-level upload_fileobj, supplying this object's
+    # bucket name and key.
+    return self.meta.client.upload_fileobj(
+        Fileobj=Fileobj,
+        Bucket=self.bucket_name,
+        Key=self.key,
+        ExtraArgs=ExtraArgs,
+        Callback=Callback,
+        Config=Config,
+    )
+
+
+@with_current_context(partial(register_feature_id, 'S3_TRANSFER'))
+def download_fileobj(
+    self, Bucket, Key, Fileobj, ExtraArgs=None, Callback=None, Config=None
+):
+    """Download an object from S3 to a file-like object.
+
+    The file-like object must be in binary mode.
+
+    This is a managed transfer which will perform a multipart download in
+    multiple threads if necessary.
+
+    Usage::
+
+        import boto3
+        s3 = boto3.client('s3')
+
+        with open('filename', 'wb') as data:
+            s3.download_fileobj('amzn-s3-demo-bucket', 'mykey', data)
+
+    :type Bucket: str
+    :param Bucket: The name of the bucket to download from.
+
+    :type Key: str
+    :param Key: The name of the key to download from.
+
+    :type Fileobj: a file-like object
+    :param Fileobj: A file-like object to download into. At a minimum, it must
+        implement the `write` method and must accept bytes.
+
+    :type ExtraArgs: dict
+    :param ExtraArgs: Extra arguments that may be passed to the
+        client operation. For allowed download arguments see
+        :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS`.
+
+    :type Callback: function
+    :param Callback: A method which takes a number of bytes transferred to
+        be periodically called during the download.
+
+    :type Config: boto3.s3.transfer.TransferConfig
+    :param Config: The transfer configuration to be used when performing the
+        download.
+    """
+    # Duck-type validation: only require a ``write`` method, not a real file.
+    if not hasattr(Fileobj, 'write'):
+        raise ValueError('Fileobj must implement write')
+
+    # Wrap the user's callback (if any) in a subscriber the transfer
+    # manager understands.
+    subscribers = None
+    if Callback is not None:
+        subscribers = [ProgressCallbackInvoker(Callback)]
+
+    config = Config
+    if config is None:
+        config = TransferConfig()
+
+    with create_transfer_manager(self, config) as manager:
+        future = manager.download(
+            bucket=Bucket,
+            key=Key,
+            fileobj=Fileobj,
+            extra_args=ExtraArgs,
+            subscribers=subscribers,
+        )
+        # Block until the managed download finishes (raises on failure).
+        return future.result()
+
+
+def bucket_download_fileobj(
+    self, Key, Fileobj, ExtraArgs=None, Callback=None, Config=None
+):
+    """Download an object from this bucket to a file-like-object.
+
+    The file-like object must be in binary mode.
+
+    This is a managed transfer which will perform a multipart download in
+    multiple threads if necessary.
+
+    Usage::
+
+        import boto3
+        s3 = boto3.resource('s3')
+        bucket = s3.Bucket('amzn-s3-demo-bucket')
+
+        with open('filename', 'wb') as data:
+            bucket.download_fileobj('mykey', data)
+
+    :type Fileobj: a file-like object
+    :param Fileobj: A file-like object to download into. At a minimum, it must
+        implement the `write` method and must accept bytes.
+
+    :type Key: str
+    :param Key: The name of the key to download from.
+
+    :type ExtraArgs: dict
+    :param ExtraArgs: Extra arguments that may be passed to the
+        client operation. For allowed download arguments see
+        :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS`.
+
+    :type Callback: function
+    :param Callback: A method which takes a number of bytes transferred to
+        be periodically called during the download.
+
+    :type Config: boto3.s3.transfer.TransferConfig
+    :param Config: The transfer configuration to be used when performing the
+        download.
+    """
+    # Delegate to the client-level download_fileobj, supplying this bucket's
+    # name.
+    return self.meta.client.download_fileobj(
+        Bucket=self.name,
+        Key=Key,
+        Fileobj=Fileobj,
+        ExtraArgs=ExtraArgs,
+        Callback=Callback,
+        Config=Config,
+    )
+
+
+def object_download_fileobj(
+    self, Fileobj, ExtraArgs=None, Callback=None, Config=None
+):
+    """Download this object from S3 to a file-like object.
+
+    The file-like object must be in binary mode.
+
+    This is a managed transfer which will perform a multipart download in
+    multiple threads if necessary.
+
+    Usage::
+
+        import boto3
+        s3 = boto3.resource('s3')
+        bucket = s3.Bucket('amzn-s3-demo-bucket')
+        obj = bucket.Object('mykey')
+
+        with open('filename', 'wb') as data:
+            obj.download_fileobj(data)
+
+    :type Fileobj: a file-like object
+    :param Fileobj: A file-like object to download into. At a minimum, it must
+        implement the `write` method and must accept bytes.
+
+    :type ExtraArgs: dict
+    :param ExtraArgs: Extra arguments that may be passed to the
+        client operation. For allowed download arguments see
+        :py:attr:`boto3.s3.transfer.S3Transfer.ALLOWED_DOWNLOAD_ARGS`.
+
+    :type Callback: function
+    :param Callback: A method which takes a number of bytes transferred to
+        be periodically called during the download.
+
+    :type Config: boto3.s3.transfer.TransferConfig
+    :param Config: The transfer configuration to be used when performing the
+        download.
+    """
+    # Delegate to the client-level download_fileobj, supplying this object's
+    # bucket name and key.
+    return self.meta.client.download_fileobj(
+        Bucket=self.bucket_name,
+        Key=self.key,
+        Fileobj=Fileobj,
+        ExtraArgs=ExtraArgs,
+        Callback=Callback,
+        Config=Config,
+    )
diff --git a/.venv/lib/python3.12/site-packages/boto3/s3/transfer.py b/.venv/lib/python3.12/site-packages/boto3/s3/transfer.py
new file mode 100644
index 00000000..73a3388b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/boto3/s3/transfer.py
@@ -0,0 +1,438 @@
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# https://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+"""Abstractions over S3's upload/download operations.
+
+This module provides high level abstractions for efficient
+uploads/downloads.  It handles several things for the user:
+
+* Automatically switching to multipart transfers when
+  a file is over a specific size threshold
+* Uploading/downloading a file in parallel
+* Progress callbacks to monitor transfers
+* Retries.  While botocore handles retries for streaming uploads,
+  it is not possible for it to handle retries for streaming
+  downloads.  This module handles retries for both cases so
+  you don't need to implement any retry logic yourself.
+
+This module has a reasonable set of defaults.  It also allows you
+to configure many aspects of the transfer process including:
+
+* Multipart threshold size
+* Max parallel downloads
+* Socket timeouts
+* Retry amounts
+
+There is no support for s3->s3 multipart copies at this
+time.
+
+
+.. _ref_s3transfer_usage:
+
+Usage
+=====
+
+The simplest way to use this module is:
+
+.. code-block:: python
+
+    client = boto3.client('s3', 'us-west-2')
+    transfer = S3Transfer(client)
+    # Upload /tmp/myfile to s3://bucket/key
+    transfer.upload_file('/tmp/myfile', 'bucket', 'key')
+
+    # Download s3://bucket/key to /tmp/myfile
+    transfer.download_file('bucket', 'key', '/tmp/myfile')
+
The ``upload_file`` and ``download_file`` methods also accept optional
``extra_args`` and ``callback`` parameters, which will be forwarded
through to the corresponding client operation.  Here are a few examples
using ``upload_file``::
+
+    # Making the object public
+    transfer.upload_file('/tmp/myfile', 'bucket', 'key',
+                         extra_args={'ACL': 'public-read'})
+
+    # Setting metadata
+    transfer.upload_file('/tmp/myfile', 'bucket', 'key',
+                         extra_args={'Metadata': {'a': 'b', 'c': 'd'}})
+
+    # Setting content type
+    transfer.upload_file('/tmp/myfile.json', 'bucket', 'key',
+                         extra_args={'ContentType': "application/json"})
+
+
+The ``S3Transfer`` class also supports progress callbacks so you can
+provide transfer progress to users.  Both the ``upload_file`` and
+``download_file`` methods take an optional ``callback`` parameter.
+Here's an example of how to print a simple progress percentage
+to the user:
+
+.. code-block:: python
+
+    class ProgressPercentage(object):
+        def __init__(self, filename):
+            self._filename = filename
+            self._size = float(os.path.getsize(filename))
+            self._seen_so_far = 0
+            self._lock = threading.Lock()
+
+        def __call__(self, bytes_amount):
+            # To simplify we'll assume this is hooked up
+            # to a single filename.
+            with self._lock:
+                self._seen_so_far += bytes_amount
+                percentage = (self._seen_so_far / self._size) * 100
+                sys.stdout.write(
+                    "\r%s  %s / %s  (%.2f%%)" % (
+                        self._filename, self._seen_so_far, self._size,
+                        percentage))
+                sys.stdout.flush()
+
+
+    transfer = S3Transfer(boto3.client('s3', 'us-west-2'))
+    # Upload /tmp/myfile to s3://bucket/key and print upload progress.
+    transfer.upload_file('/tmp/myfile', 'bucket', 'key',
+                         callback=ProgressPercentage('/tmp/myfile'))
+
+
+
+You can also provide a TransferConfig object to the S3Transfer
+object that gives you more fine grained control over the
+transfer.  For example:
+
+.. code-block:: python
+
+    client = boto3.client('s3', 'us-west-2')
+    config = TransferConfig(
+        multipart_threshold=8 * 1024 * 1024,
+        max_concurrency=10,
+        num_download_attempts=10,
+    )
+    transfer = S3Transfer(client, config)
+    transfer.upload_file('/tmp/foo', 'bucket', 'key')
+
+
+"""
+
+import logging
+import threading
+from os import PathLike, fspath, getpid
+
+from botocore.compat import HAS_CRT
+from botocore.exceptions import ClientError
+from s3transfer.exceptions import (
+    RetriesExceededError as S3TransferRetriesExceededError,
+)
+from s3transfer.futures import NonThreadedExecutor
+from s3transfer.manager import TransferConfig as S3TransferConfig
+from s3transfer.manager import TransferManager
+from s3transfer.subscribers import BaseSubscriber
+from s3transfer.utils import OSUtils
+
+import boto3.s3.constants as constants
+from boto3.exceptions import RetriesExceededError, S3UploadFailedError
+
+if HAS_CRT:
+    import awscrt.s3
+
+    from boto3.crt import create_crt_transfer_manager
+
# Byte-size helpers used for the default multipart thresholds and chunk
# sizes defined in TransferConfig below.
KB = 1024
MB = KB * KB

# Module-level logger, named after this module per logging convention.
logger = logging.getLogger(__name__)
+
+
def create_transfer_manager(client, config, osutil=None):
    """Creates a transfer manager based on configuration

    :type client: boto3.client
    :param client: The S3 client to use

    :type config: boto3.s3.transfer.TransferConfig
    :param config: The transfer config to use

    :type osutil: s3transfer.utils.OSUtils
    :param osutil: The os utility to use

    :rtype: s3transfer.manager.TransferManager
    :returns: A transfer manager based on parameters provided
    """
    # Prefer the CRT-based manager when configuration and environment
    # allow it; it may still decline (return None), in which case we
    # fall through to the classic implementation.
    if _should_use_crt(config):
        crt_manager = create_crt_transfer_manager(client, config)
        if crt_manager is not None:
            logger.debug(
                f"Using CRT client. pid: {getpid()}, thread: {threading.get_ident()}"
            )
            return crt_manager

    # If we don't resolve something above, fallback to the default.
    logger.debug(
        f"Using default client. pid: {getpid()}, thread: {threading.get_ident()}"
    )
    return _create_default_transfer_manager(client, config, osutil)
+
+
def _should_use_crt(config):
    """Return True when the CRT transfer manager should be attempted.

    Requires all of: awscrt importable at version >= 0.19.18, the host
    reported as CRT-optimized, and the user leaving the preferred
    transfer client on auto-resolve.
    """
    # This feature requires awscrt>=0.19.18
    crt_available = HAS_CRT and has_minimum_crt_version((0, 19, 18))
    is_optimized_instance = (
        awscrt.s3.is_optimized_for_system() if crt_available else False
    )
    pref_transfer_client = config.preferred_transfer_client.lower()

    use_crt = (
        is_optimized_instance
        and pref_transfer_client == constants.AUTO_RESOLVE_TRANSFER_CLIENT
    )
    if use_crt:
        logger.debug(
            "Attempting to use CRTTransferManager. Config settings may be ignored."
        )
        return True

    logger.debug(
        "Opting out of CRT Transfer Manager. Preferred client: "
        f"{pref_transfer_client}, CRT available: {HAS_CRT}, "
        f"Instance Optimized: {is_optimized_instance}."
    )
    return False
+
+
def has_minimum_crt_version(minimum_version):
    """Not intended for use outside boto3."""
    if not HAS_CRT:
        return False

    version_string = awscrt.__version__
    try:
        # Non-numeric version components (e.g. dev/rc builds) fail int()
        # and are treated as not meeting the minimum.
        parsed_version = tuple(
            int(part) for part in version_string.split(".")
        )
    except (TypeError, ValueError):
        return False

    return parsed_version >= minimum_version
+
+
def _create_default_transfer_manager(client, config, osutil):
    """Create the default TransferManager implementation for s3transfer."""
    # A non-threaded executor runs all transfer work on the calling
    # thread; None lets s3transfer pick its standard threaded executor.
    executor_cls = None if config.use_threads else NonThreadedExecutor
    return TransferManager(client, config, osutil, executor_cls)
+
+
class TransferConfig(S3TransferConfig):
    # Maps boto3-facing argument names onto the attribute names used by
    # the underlying s3transfer configuration object.
    ALIAS = {
        'max_concurrency': 'max_request_concurrency',
        'max_io_queue': 'max_io_queue_size',
    }

    def __init__(
        self,
        multipart_threshold=8 * MB,
        max_concurrency=10,
        multipart_chunksize=8 * MB,
        num_download_attempts=5,
        max_io_queue=100,
        io_chunksize=256 * KB,
        use_threads=True,
        max_bandwidth=None,
        preferred_transfer_client=constants.AUTO_RESOLVE_TRANSFER_CLIENT,
    ):
        """Configuration object for managed S3 transfers

        :param multipart_threshold: Transfer size above which multipart
            uploads, downloads, and copies are automatically triggered.

        :param max_concurrency: Maximum number of threads making requests
            to perform a transfer. Ignored when ``use_threads`` is
            ``False``, since only the current thread is used then.

        :param multipart_chunksize: The partition size of each part for a
            multipart transfer.

        :param num_download_attempts: How many times a download is retried
            upon errors while streaming an object's body from S3 (i.e.
            socket errors and read timeouts occurring after an OK response
            from S3). Throttling and 5xx errors are already retried by
            botocore and do not count against this limit (default is 5).

        :param max_io_queue: Maximum number of read parts that may be
            queued in memory awaiting write for a download. Each queued
            part is at most ``io_chunksize`` bytes.

        :param io_chunksize: The max size of each chunk in the io queue.
            Currently this is also the size used when ``read`` is called
            on the downloaded stream.

        :param use_threads: If True, threads are used when performing
            S3 transfers; if False, all transfer logic runs in the
            current thread.

        :param max_bandwidth: Maximum bandwidth, as an integer number of
            bytes per second, consumed when uploading and downloading
            file content.

        :param preferred_transfer_client: String specifying preferred transfer
            client for transfer operations.

            Current supported settings are:
              * auto (default) - Use the CRTTransferManager when calls
                  are made with supported environment and settings.
              * classic - Only use the origin S3TransferManager with
                  requests. Disables possible CRT upgrade on requests.
        """
        super().__init__(
            multipart_threshold=multipart_threshold,
            max_request_concurrency=max_concurrency,
            multipart_chunksize=multipart_chunksize,
            num_download_attempts=num_download_attempts,
            max_io_queue_size=max_io_queue,
            io_chunksize=io_chunksize,
            max_bandwidth=max_bandwidth,
        )
        # Mirror the inherited attributes under their historical boto3
        # names so existing callers can keep reading/writing the aliases.
        for alias_name, wrapped_name in self.ALIAS.items():
            setattr(self, alias_name, getattr(self, wrapped_name))
        self.use_threads = use_threads
        self.preferred_transfer_client = preferred_transfer_client

    def __setattr__(self, name, value):
        # Keep an alias and the attribute it points to in sync: a write
        # through either name updates the real s3transfer setting, which
        # is what actually governs the TransferManager.
        if name in self.ALIAS:
            super().__setattr__(self.ALIAS[name], value)
        # Always set the value of the actual name provided.
        super().__setattr__(name, value)
+
+
class S3Transfer:
    """High-level interface for managed S3 uploads and downloads.

    Wraps a ``TransferManager`` while preserving the historical boto3
    exception types (``S3UploadFailedError``, ``RetriesExceededError``)
    and simple byte-count progress callbacks.
    """

    # Extra-args whitelists, re-exported from s3transfer for callers that
    # validate their own arguments.
    ALLOWED_DOWNLOAD_ARGS = TransferManager.ALLOWED_DOWNLOAD_ARGS
    ALLOWED_UPLOAD_ARGS = TransferManager.ALLOWED_UPLOAD_ARGS

    def __init__(self, client=None, config=None, osutil=None, manager=None):
        """
        :param client: A boto3 S3 client used to build a transfer manager.
        :param config: Optional :class:`TransferConfig`; a default-valued
            config is created when omitted.
        :param osutil: Optional ``s3transfer.utils.OSUtils`` instance; a
            default is created when omitted.
        :param manager: A pre-built ``TransferManager``. Mutually exclusive
            with ``client``, ``config``, and ``osutil``.
        :raises ValueError: If neither ``client`` nor ``manager`` is
            provided, or if ``manager`` is combined with any of the other
            parameters.
        """
        if not client and not manager:
            raise ValueError(
                'Either a boto3.Client or s3transfer.manager.TransferManager '
                'must be provided'
            )
        if manager and any([client, config, osutil]):
            raise ValueError(
                'Manager cannot be provided with client, config, '
                'nor osutil. These parameters are mutually exclusive.'
            )
        if config is None:
            config = TransferConfig()
        if osutil is None:
            osutil = OSUtils()
        if manager:
            self._manager = manager
        else:
            self._manager = create_transfer_manager(client, config, osutil)

    def upload_file(
        self, filename, bucket, key, callback=None, extra_args=None
    ):
        """Upload a file to an S3 object.

        Variants have also been injected into S3 client, Bucket and Object.
        You don't have to use S3Transfer.upload_file() directly.

        :raises S3UploadFailedError: If the underlying client operation
            fails with a ``ClientError``.

        .. seealso::
            :py:meth:`S3.Client.upload_file`
            :py:meth:`S3.Client.upload_fileobj`
        """
        if isinstance(filename, PathLike):
            filename = fspath(filename)
        if not isinstance(filename, str):
            raise ValueError('Filename must be a string or a path-like object')

        subscribers = self._get_subscribers(callback)
        future = self._manager.upload(
            filename, bucket, key, extra_args, subscribers
        )
        try:
            future.result()
        # If a client error was raised, add the backwards compatibility layer
        # that raises a S3UploadFailedError. These specific errors were only
        # ever thrown for upload_parts but now can be thrown for any related
        # client error.
        except ClientError as e:
            # Chain the original ClientError explicitly so tracebacks show
            # the underlying cause rather than an implicit context.
            raise S3UploadFailedError(
                "Failed to upload {} to {}: {}".format(
                    filename, '/'.join([bucket, key]), e
                )
            ) from e

    def download_file(
        self, bucket, key, filename, extra_args=None, callback=None
    ):
        """Download an S3 object to a file.

        Variants have also been injected into S3 client, Bucket and Object.
        You don't have to use S3Transfer.download_file() directly.

        :raises RetriesExceededError: If the configured number of download
            attempts is exhausted.

        .. seealso::
            :py:meth:`S3.Client.download_file`
            :py:meth:`S3.Client.download_fileobj`
        """
        if isinstance(filename, PathLike):
            filename = fspath(filename)
        if not isinstance(filename, str):
            raise ValueError('Filename must be a string or a path-like object')

        subscribers = self._get_subscribers(callback)
        future = self._manager.download(
            bucket, key, filename, extra_args, subscribers
        )
        try:
            future.result()
        # This is for backwards compatibility where when retries are
        # exceeded we need to throw the same error from boto3 instead of
        # s3transfer's built in RetriesExceededError as current users are
        # catching the boto3 one instead of the s3transfer exception to do
        # their own retries.
        except S3TransferRetriesExceededError as e:
            # Chain the s3transfer exception for full diagnostic context.
            raise RetriesExceededError(e.last_exception) from e

    def _get_subscribers(self, callback):
        # Wrap a plain byte-count callback in the subscriber interface the
        # transfer manager expects; None means no progress reporting.
        if not callback:
            return None
        return [ProgressCallbackInvoker(callback)]

    def __enter__(self):
        return self

    def __exit__(self, *args):
        # Delegate cleanup (shutdown of the transfer manager) on context
        # exit, forwarding any in-flight exception information.
        self._manager.__exit__(*args)
+
+
class ProgressCallbackInvoker(BaseSubscriber):
    """Backwards-compatibility subscriber that forwards progress events.

    :param callback: A callable that takes a single positional argument for
        how many bytes were transferred.
    """

    def __init__(self, callback):
        # The user-supplied callable, invoked once per progress event.
        self._callback = callback

    def on_progress(self, bytes_transferred, **kwargs):
        # Forward only the byte count; any other subscriber keyword
        # arguments are intentionally ignored.
        callback = self._callback
        callback(bytes_transferred)