path: root/.venv/lib/python3.12/site-packages/storage3
author    S. Solomon Darnell  2025-03-28 21:52:21 -0500
committer S. Solomon Darnell  2025-03-28 21:52:21 -0500
commit    4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
tree      ee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/storage3
parent    cc961e04ba734dd72309fb548a2f97d67d578813 (diff)
download  gn-ai-master.tar.gz
two versions of R2R are here (HEAD, master)
Diffstat (limited to '.venv/lib/python3.12/site-packages/storage3')
-rw-r--r--  .venv/lib/python3.12/site-packages/storage3/__init__.py          44
-rw-r--r--  .venv/lib/python3.12/site-packages/storage3/_async/__init__.py    1
-rw-r--r--  .venv/lib/python3.12/site-packages/storage3/_async/bucket.py    120
-rw-r--r--  .venv/lib/python3.12/site-packages/storage3/_async/client.py     70
-rw-r--r--  .venv/lib/python3.12/site-packages/storage3/_async/file_api.py  559
-rw-r--r--  .venv/lib/python3.12/site-packages/storage3/_sync/__init__.py     1
-rw-r--r--  .venv/lib/python3.12/site-packages/storage3/_sync/bucket.py     120
-rw-r--r--  .venv/lib/python3.12/site-packages/storage3/_sync/client.py      70
-rw-r--r--  .venv/lib/python3.12/site-packages/storage3/_sync/file_api.py   559
-rw-r--r--  .venv/lib/python3.12/site-packages/storage3/constants.py         15
-rw-r--r--  .venv/lib/python3.12/site-packages/storage3/exceptions.py        33
-rw-r--r--  .venv/lib/python3.12/site-packages/storage3/types.py            121
-rw-r--r--  .venv/lib/python3.12/site-packages/storage3/utils.py             11
-rw-r--r--  .venv/lib/python3.12/site-packages/storage3/version.py            1
14 files changed, 1725 insertions(+), 0 deletions(-)
diff --git a/.venv/lib/python3.12/site-packages/storage3/__init__.py b/.venv/lib/python3.12/site-packages/storage3/__init__.py
new file mode 100644
index 00000000..48b4cb74
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/storage3/__init__.py
@@ -0,0 +1,44 @@
+from __future__ import annotations
+
+from typing import Literal, Union, overload
+
+from storage3._async import AsyncStorageClient
+from storage3._async.bucket import AsyncStorageBucketAPI
+from storage3._async.file_api import AsyncBucket
+from storage3._sync import SyncStorageClient
+from storage3._sync.bucket import SyncStorageBucketAPI
+from storage3._sync.file_api import SyncBucket
+from storage3.constants import DEFAULT_TIMEOUT
+from storage3.version import __version__
+
+__all__ = [
+ "create_client",
+ "__version__",
+ "AsyncStorageClient",
+ "AsyncBucket",
+ "AsyncStorageBucketAPI",
+ "SyncStorageClient",
+ "SyncBucket",
+ "SyncStorageBucketAPI",
+]
+
+
+@overload
+def create_client(
+ url: str, headers: dict[str, str], *, is_async: Literal[True]
+) -> AsyncStorageClient: ...
+
+
+@overload
+def create_client(
+ url: str, headers: dict[str, str], *, is_async: Literal[False]
+) -> SyncStorageClient: ...
+
+
+def create_client(
+ url: str, headers: dict[str, str], *, is_async: bool, timeout: int = DEFAULT_TIMEOUT
+) -> Union[AsyncStorageClient, SyncStorageClient]:
+ if is_async:
+ return AsyncStorageClient(url, headers, timeout)
+ else:
+ return SyncStorageClient(url, headers, timeout)
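
For orientation, a minimal sketch of how this factory is called. The endpoint URL and key below are placeholders, not values from this repository; a real deployment supplies its own.

    from storage3 import create_client

    # Placeholder endpoint and credentials -- substitute real project values.
    url = "https://<project>.supabase.co/storage/v1"
    headers = {"Authorization": "Bearer <service-key>"}

    # is_async is keyword-only and selects the client class via the overloads above.
    sync_storage = create_client(url, headers, is_async=False)
    async_storage = create_client(url, headers, is_async=True)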
diff --git a/.venv/lib/python3.12/site-packages/storage3/_async/__init__.py b/.venv/lib/python3.12/site-packages/storage3/_async/__init__.py
new file mode 100644
index 00000000..694f552f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/storage3/_async/__init__.py
@@ -0,0 +1 @@
+from .client import AsyncStorageClient as AsyncStorageClient
diff --git a/.venv/lib/python3.12/site-packages/storage3/_async/bucket.py b/.venv/lib/python3.12/site-packages/storage3/_async/bucket.py
new file mode 100644
index 00000000..9d6bf886
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/storage3/_async/bucket.py
@@ -0,0 +1,120 @@
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from httpx import HTTPStatusError, Response
+
+from ..exceptions import StorageApiError
+from ..types import CreateOrUpdateBucketOptions, RequestMethod
+from ..utils import AsyncClient
+from .file_api import AsyncBucket
+
+__all__ = ["AsyncStorageBucketAPI"]
+
+
+class AsyncStorageBucketAPI:
+ """This class abstracts access to the endpoint to the Get, List, Empty, and Delete operations on a bucket"""
+
+ def __init__(self, session: AsyncClient) -> None:
+ self._client = session
+
+ async def _request(
+ self,
+ method: RequestMethod,
+ url: str,
+ json: Optional[dict[Any, Any]] = None,
+ ) -> Response:
+ try:
+ response = await self._client.request(method, url, json=json)
+ response.raise_for_status()
+ except HTTPStatusError as exc:
+ resp = exc.response.json()
+ raise StorageApiError(resp["message"], resp["error"], resp["statusCode"])
+
+ return response
+
+ async def list_buckets(self) -> list[AsyncBucket]:
+ """Retrieves the details of all storage buckets within an existing product."""
+ # if the request doesn't error, it is assured to return a list
+ res = await self._request("GET", "/bucket")
+ return [AsyncBucket(**bucket) for bucket in res.json()]
+
+ async def get_bucket(self, id: str) -> AsyncBucket:
+ """Retrieves the details of an existing storage bucket.
+
+ Parameters
+ ----------
+ id
+ The unique identifier of the bucket you would like to retrieve.
+ """
+ res = await self._request("GET", f"/bucket/{id}")
+ json = res.json()
+ return AsyncBucket(**json)
+
+ async def create_bucket(
+ self,
+ id: str,
+ name: Optional[str] = None,
+ options: Optional[CreateOrUpdateBucketOptions] = None,
+ ) -> dict[str, str]:
+ """Creates a new storage bucket.
+
+ Parameters
+ ----------
+ id
+ A unique identifier for the bucket you are creating.
+ name
+ A name for the bucket you are creating. If not passed, the id is used as the name as well.
+ options
+ Extra options to send while creating the bucket. Valid options are `public`, `file_size_limit` and
+ `allowed_mime_types`.
+ """
+ json: dict[str, Any] = {"id": id, "name": name or id}
+ if options:
+ json.update(**options)
+ res = await self._request(
+ "POST",
+ "/bucket",
+ json=json,
+ )
+ return res.json()
+
+ async def update_bucket(
+ self, id: str, options: CreateOrUpdateBucketOptions
+ ) -> dict[str, str]:
+ """Update a storage bucket.
+
+ Parameters
+ ----------
+ id
+ The unique identifier of the bucket you would like to update.
+ options
+ The properties you want to update. Valid options are `public`, `file_size_limit` and
+ `allowed_mime_types`.
+ """
+ json = {"id": id, "name": id, **options}
+ res = await self._request("PUT", f"/bucket/{id}", json=json)
+ return res.json()
+
+ async def empty_bucket(self, id: str) -> dict[str, str]:
+ """Removes all objects inside a single bucket.
+
+ Parameters
+ ----------
+ id
+ The unique identifier of the bucket you would like to empty.
+ """
+ res = await self._request("POST", f"/bucket/{id}/empty", json={})
+ return res.json()
+
+ async def delete_bucket(self, id: str) -> dict[str, str]:
+ """Deletes an existing bucket. Note that you cannot delete buckets with existing objects inside. You must first
+ `empty()` the bucket.
+
+ Parameters
+ ----------
+ id
+ The unique identifier of the bucket you would like to delete.
+ """
+ res = await self._request("DELETE", f"/bucket/{id}", json={})
+ return res.json()
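
A usage sketch of the bucket lifecycle above; the URL and key are placeholders and `avatars` is an arbitrary example bucket id.

    import asyncio

    from storage3 import create_client

    async def main() -> None:
        storage = create_client(
            "https://<project>.supabase.co/storage/v1",   # placeholder URL
            {"Authorization": "Bearer <service-key>"},    # placeholder key
            is_async=True,
        )
        await storage.create_bucket("avatars", options={"public": True})
        print([b.name for b in await storage.list_buckets()])
        await storage.empty_bucket("avatars")   # buckets must be emptied before deletion
        await storage.delete_bucket("avatars")
        await storage.aclose()

    asyncio.run(main())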
diff --git a/.venv/lib/python3.12/site-packages/storage3/_async/client.py b/.venv/lib/python3.12/site-packages/storage3/_async/client.py
new file mode 100644
index 00000000..95852a02
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/storage3/_async/client.py
@@ -0,0 +1,70 @@
+from __future__ import annotations
+
+from typing import Optional
+
+from storage3.constants import DEFAULT_TIMEOUT
+
+from ..utils import AsyncClient
+from ..version import __version__
+from .bucket import AsyncStorageBucketAPI
+from .file_api import AsyncBucketProxy
+
+__all__ = [
+ "AsyncStorageClient",
+]
+
+
+class AsyncStorageClient(AsyncStorageBucketAPI):
+ """Manage storage buckets and files."""
+
+ def __init__(
+ self,
+ url: str,
+ headers: dict[str, str],
+ timeout: int = DEFAULT_TIMEOUT,
+ verify: bool = True,
+ proxy: Optional[str] = None,
+ ) -> None:
+ headers = {
+ "User-Agent": f"supabase-py/storage3 v{__version__}",
+ **headers,
+ }
+ self.session = self._create_session(url, headers, timeout, verify, proxy)
+ super().__init__(self.session)
+
+ def _create_session(
+ self,
+ base_url: str,
+ headers: dict[str, str],
+ timeout: int,
+ verify: bool = True,
+ proxy: Optional[str] = None,
+ ) -> AsyncClient:
+ return AsyncClient(
+ base_url=base_url,
+ headers=headers,
+ timeout=timeout,
+ proxy=proxy,
+ verify=bool(verify),
+ follow_redirects=True,
+ http2=True,
+ )
+
+ async def __aenter__(self) -> AsyncStorageClient:
+ return self
+
+ async def __aexit__(self, exc_type, exc, tb) -> None:
+ await self.aclose()
+
+ async def aclose(self) -> None:
+ await self.session.aclose()
+
+ def from_(self, id: str) -> AsyncBucketProxy:
+ """Run a storage file operation.
+
+ Parameters
+ ----------
+ id
+ The unique identifier of the bucket
+ """
+ return AsyncBucketProxy(id, self._client)
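
Since `__aenter__`/`__aexit__` are defined above, the client works as an async context manager that closes the underlying httpx session on exit. A sketch with placeholder credentials:

    import asyncio

    from storage3 import AsyncStorageClient

    async def main() -> None:
        async with AsyncStorageClient(
            "https://<project>.supabase.co/storage/v1",   # placeholder URL
            {"Authorization": "Bearer <service-key>"},    # placeholder key
        ) as storage:
            bucket = storage.from_("avatars")   # AsyncBucketProxy bound to one bucket
            print(await bucket.list())

    asyncio.run(main())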
diff --git a/.venv/lib/python3.12/site-packages/storage3/_async/file_api.py b/.venv/lib/python3.12/site-packages/storage3/_async/file_api.py
new file mode 100644
index 00000000..029a0330
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/storage3/_async/file_api.py
@@ -0,0 +1,559 @@
+from __future__ import annotations
+
+import base64
+import json
+import urllib.parse
+from dataclasses import dataclass, field
+from io import BufferedReader, FileIO
+from pathlib import Path
+from typing import Any, Literal, Optional, Union, cast
+
+from httpx import HTTPStatusError, Response
+
+from ..constants import DEFAULT_FILE_OPTIONS, DEFAULT_SEARCH_OPTIONS
+from ..exceptions import StorageApiError
+from ..types import (
+ BaseBucket,
+ CreateSignedUrlResponse,
+ CreateSignedURLsOptions,
+ DownloadOptions,
+ FileOptions,
+ ListBucketFilesOptions,
+ RequestMethod,
+ SignedUploadURL,
+ SignedUrlResponse,
+ UploadData,
+ UploadResponse,
+ URLOptions,
+)
+from ..utils import AsyncClient, StorageException
+
+__all__ = ["AsyncBucket"]
+
+
+class AsyncBucketActionsMixin:
+ """Functions needed to access the file API."""
+
+ id: str
+ _client: AsyncClient
+
+ async def _request(
+ self,
+ method: RequestMethod,
+ url: str,
+ headers: Optional[dict[str, Any]] = None,
+ json: Optional[dict[Any, Any]] = None,
+ files: Optional[Any] = None,
+ **kwargs: Any,
+ ) -> Response:
+ try:
+ response = await self._client.request(
+ method, url, headers=headers or {}, json=json, files=files, **kwargs
+ )
+ response.raise_for_status()
+ except HTTPStatusError as exc:
+ resp = exc.response.json()
+ raise StorageApiError(resp["message"], resp["error"], resp["statusCode"])
+
+ # close the resource before returning the response
+ if files and "file" in files and isinstance(files["file"][1], BufferedReader):
+ files["file"][1].close()
+
+ return response
+
+ async def create_signed_upload_url(self, path: str) -> SignedUploadURL:
+ """
+ Creates a signed upload URL.
+
+ Parameters
+ ----------
+ path
+ The file path, including the file name. For example `folder/image.png`.
+ """
+ _path = self._get_final_path(path)
+ response = await self._request("POST", f"/object/upload/sign/{_path}")
+ data = response.json()
+ full_url: urllib.parse.ParseResult = urllib.parse.urlparse(
+ str(self._client.base_url) + cast(str, data["url"]).lstrip("/")
+ )
+ query_params = urllib.parse.parse_qs(full_url.query)
+ if not query_params.get("token"):
+ raise StorageException("No token sent by the API")
+ return {
+ "signed_url": full_url.geturl(),
+ "signedUrl": full_url.geturl(),
+ "token": query_params["token"][0],
+ "path": path,
+ }
+
+ async def upload_to_signed_url(
+ self,
+ path: str,
+ token: str,
+ file: Union[BufferedReader, bytes, FileIO, str, Path],
+ file_options: Optional[FileOptions] = None,
+ ) -> UploadResponse:
+ """
+        Upload a file with a token generated from :meth:`.create_signed_upload_url`
+
+ Parameters
+ ----------
+ path
+ The file path, including the file name
+ token
+            The token generated from :meth:`.create_signed_upload_url`
+ file
+ The file contents or a file-like object to upload
+ file_options
+ Additional options for the uploaded file
+ """
+ _path = self._get_final_path(path)
+ _url = urllib.parse.urlparse(f"/object/upload/sign/{_path}")
+ query_params = urllib.parse.urlencode({"token": token})
+ final_url = f"{_url.geturl()}?{query_params}"
+
+ if file_options is None:
+ file_options = {}
+
+ cache_control = file_options.get("cache-control")
+ # cacheControl is also passed as form data
+ # https://github.com/supabase/storage-js/blob/fa44be8156295ba6320ffeff96bdf91016536a46/src/packages/StorageFileApi.ts#L89
+ _data = {}
+ if cache_control:
+ file_options["cache-control"] = f"max-age={cache_control}"
+ _data = {"cacheControl": cache_control}
+ headers = {
+ **self._client.headers,
+ **DEFAULT_FILE_OPTIONS,
+ **file_options,
+ }
+ filename = path.rsplit("/", maxsplit=1)[-1]
+
+ if (
+ isinstance(file, BufferedReader)
+ or isinstance(file, bytes)
+ or isinstance(file, FileIO)
+ ):
+ # bytes or byte-stream-like object received
+ _file = {"file": (filename, file, headers.pop("content-type"))}
+ else:
+            # str or pathlib.Path received
+ _file = {
+ "file": (
+ filename,
+ open(file, "rb"),
+ headers.pop("content-type"),
+ )
+ }
+ response = await self._request(
+ "PUT", final_url, files=_file, headers=headers, data=_data
+ )
+ data: UploadData = response.json()
+
+ return UploadResponse(path=path, Key=data.get("Key"))
+
+ async def create_signed_url(
+ self, path: str, expires_in: int, options: URLOptions = {}
+ ) -> SignedUrlResponse:
+ """
+ Parameters
+ ----------
+ path
+ file path to be downloaded, including the current file name.
+ expires_in
+ number of seconds until the signed URL expires.
+ options
+ options to be passed for downloading or transforming the file.
+ """
+ json = {"expiresIn": str(expires_in)}
+ download_query = ""
+ if options.get("download"):
+ json.update({"download": options["download"]})
+
+ download_query = (
+ "&download="
+ if options.get("download") is True
+ else f"&download={options.get('download')}"
+ )
+ if options.get("transform"):
+ json.update({"transform": options["transform"]})
+
+ path = self._get_final_path(path)
+ response = await self._request(
+ "POST",
+ f"/object/sign/{path}",
+ json=json,
+ )
+ data = response.json()
+
+ # Prepare URL
+ url = urllib.parse.urlparse(data["signedURL"])
+ url = urllib.parse.quote(url.path) + f"?{url.query}"
+
+ signedURL = (
+ f"{self._client.base_url}{cast(str, url).lstrip('/')}{download_query}"
+ )
+ data: SignedUrlResponse = {"signedURL": signedURL, "signedUrl": signedURL}
+ return data
+
+ async def create_signed_urls(
+ self, paths: list[str], expires_in: int, options: CreateSignedURLsOptions = {}
+ ) -> list[CreateSignedUrlResponse]:
+ """
+ Parameters
+ ----------
+        paths
+            file paths to be downloaded, including the current file names.
+ expires_in
+ number of seconds until the signed URL expires.
+ options
+ options to be passed for downloading the file.
+ """
+ json = {"paths": paths, "expiresIn": str(expires_in)}
+ download_query = ""
+ if options.get("download"):
+ json.update({"download": options.get("download")})
+
+ download_query = (
+ "&download="
+ if options.get("download") is True
+ else f"&download={options.get('download')}"
+ )
+
+ response = await self._request(
+ "POST",
+ f"/object/sign/{self.id}",
+ json=json,
+ )
+ data = response.json()
+ signed_urls = []
+ for item in data:
+
+ # Prepare URL
+ url = urllib.parse.urlparse(item["signedURL"])
+ url = urllib.parse.quote(url.path) + f"?{url.query}"
+
+ signedURL = (
+ f"{self._client.base_url}{cast(str, url).lstrip('/')}{download_query}"
+ )
+ signed_item: CreateSignedUrlResponse = {
+ "error": item["error"],
+ "path": item["path"],
+ "signedURL": signedURL,
+ "signedUrl": signedURL,
+ }
+ signed_urls.append(signed_item)
+ return signed_urls
+
+ async def get_public_url(self, path: str, options: URLOptions = {}) -> str:
+ """
+ Parameters
+ ----------
+ path
+ file path, including the path and file name. For example `folder/image.png`.
+ """
+ _query_string = []
+ download_query = ""
+ if options.get("download"):
+ download_query = (
+ "&download="
+ if options.get("download") is True
+ else f"&download={options.get('download')}"
+ )
+
+ if download_query:
+ _query_string.append(download_query)
+
+ render_path = "render/image" if options.get("transform") else "object"
+ transformation_query = (
+ urllib.parse.urlencode(options.get("transform"))
+ if options.get("transform")
+ else None
+ )
+
+ if transformation_query:
+ _query_string.append(transformation_query)
+
+ query_string = "&".join(_query_string)
+ query_string = f"?{query_string}"
+ _path = self._get_final_path(path)
+ return f"{self._client.base_url}{render_path}/public/{_path}{query_string}"
+
+ async def move(self, from_path: str, to_path: str) -> dict[str, str]:
+ """
+ Moves an existing file, optionally renaming it at the same time.
+
+ Parameters
+ ----------
+ from_path
+ The original file path, including the current file name. For example `folder/image.png`.
+ to_path
+ The new file path, including the new file name. For example `folder/image-copy.png`.
+ """
+ res = await self._request(
+ "POST",
+ "/object/move",
+ json={
+ "bucketId": self.id,
+ "sourceKey": from_path,
+ "destinationKey": to_path,
+ },
+ )
+ return res.json()
+
+ async def copy(self, from_path: str, to_path: str) -> dict[str, str]:
+ """
+ Copies an existing file to a new path in the same bucket.
+
+ Parameters
+ ----------
+ from_path
+ The original file path, including the current file name. For example `folder/image.png`.
+ to_path
+ The new file path, including the new file name. For example `folder/image-copy.png`.
+ """
+ res = await self._request(
+ "POST",
+ "/object/copy",
+ json={
+ "bucketId": self.id,
+ "sourceKey": from_path,
+ "destinationKey": to_path,
+ },
+ )
+ return res.json()
+
+ async def remove(self, paths: list) -> list[dict[str, Any]]:
+ """
+ Deletes files within the same bucket
+
+ Parameters
+ ----------
+ paths
+            A list of file paths to be deleted, including the path and file name. For example [`folder/image.png`].
+ """
+ response = await self._request(
+ "DELETE",
+ f"/object/{self.id}",
+ json={"prefixes": paths},
+ )
+ return response.json()
+
+ async def info(
+ self,
+ path: str,
+ ) -> list[dict[str, str]]:
+ """
+ Lists info for a particular file.
+
+ Parameters
+ ----------
+ path
+ The path to the file.
+ """
+ response = await self._request(
+ "GET",
+ f"/object/info/{self.id}/{path}",
+ )
+ return response.json()
+
+ async def exists(
+ self,
+ path: str,
+ ) -> bool:
+ """
+ Returns True if the file exists, False otherwise.
+
+ Parameters
+ ----------
+ path
+ The path to the file.
+ """
+ try:
+ response = await self._request(
+ "HEAD",
+ f"/object/{self.id}/{path}",
+ )
+ return response.status_code == 200
+        except json.JSONDecodeError:  # a failed HEAD response has no JSON error body to parse
+ return False
+
+ async def list(
+ self,
+ path: Optional[str] = None,
+ options: Optional[ListBucketFilesOptions] = None,
+ ) -> list[dict[str, str]]:
+ """
+ Lists all the files within a bucket.
+
+ Parameters
+ ----------
+ path
+ The folder path.
+ options
+ Search options, including `limit`, `offset`, `sortBy` and `search`.
+ """
+ extra_options = options or {}
+ extra_headers = {"Content-Type": "application/json"}
+ body = {
+ **DEFAULT_SEARCH_OPTIONS,
+ **extra_options,
+ "prefix": path or "",
+ }
+ response = await self._request(
+ "POST",
+ f"/object/list/{self.id}",
+ json=body,
+ headers=extra_headers,
+ )
+ return response.json()
+
+ async def download(self, path: str, options: DownloadOptions = {}) -> bytes:
+ """
+ Downloads a file.
+
+ Parameters
+ ----------
+ path
+ The file path to be downloaded, including the path and file name. For example `folder/image.png`.
+ """
+ render_path = (
+ "render/image/authenticated" if options.get("transform") else "object"
+ )
+ transformation_query = urllib.parse.urlencode(options.get("transform") or {})
+ query_string = f"?{transformation_query}" if transformation_query else ""
+
+ _path = self._get_final_path(path)
+ response = await self._request(
+ "GET",
+ f"{render_path}/{_path}{query_string}",
+ )
+ return response.content
+
+ async def _upload_or_update(
+ self,
+ method: Literal["POST", "PUT"],
+ path: str,
+ file: Union[BufferedReader, bytes, FileIO, str, Path],
+ file_options: Optional[FileOptions] = None,
+ ) -> UploadResponse:
+ """
+ Uploads a file to an existing bucket.
+
+ Parameters
+ ----------
+ path
+ The relative file path including the bucket ID. Should be of the format `bucket/folder/subfolder/filename.png`.
+ The bucket must already exist before attempting to upload.
+ file
+            The file object to be stored in the bucket, or an async generator of chunks.
+ file_options
+ HTTP headers.
+ """
+ if file_options is None:
+ file_options = {}
+ cache_control = file_options.pop("cache-control", None)
+ _data = {}
+
+ upsert = file_options.pop("upsert", None)
+ if upsert:
+ file_options.update({"x-upsert": upsert})
+
+ metadata = file_options.pop("metadata", None)
+ file_opts_headers = file_options.pop("headers", None)
+
+ headers = {
+ **self._client.headers,
+ **DEFAULT_FILE_OPTIONS,
+ **file_options,
+ }
+
+ if metadata:
+ metadata_str = json.dumps(metadata)
+ headers["x-metadata"] = base64.b64encode(metadata_str.encode())
+ _data.update({"metadata": metadata_str})
+
+ if file_opts_headers:
+ headers.update({**file_opts_headers})
+
+ # Only include x-upsert on a POST method
+ if method != "POST":
+ del headers["x-upsert"]
+
+ filename = path.rsplit("/", maxsplit=1)[-1]
+
+ if cache_control:
+ headers["cache-control"] = f"max-age={cache_control}"
+ _data.update({"cacheControl": cache_control})
+
+ if (
+ isinstance(file, BufferedReader)
+ or isinstance(file, bytes)
+ or isinstance(file, FileIO)
+ ):
+ # bytes or byte-stream-like object received
+ files = {"file": (filename, file, headers.pop("content-type"))}
+ else:
+            # str or pathlib.Path received
+ files = {
+ "file": (
+ filename,
+ open(file, "rb"),
+ headers.pop("content-type"),
+ )
+ }
+
+ _path = self._get_final_path(path)
+
+ response = await self._request(
+ method, f"/object/{_path}", files=files, headers=headers, data=_data
+ )
+
+ data: UploadData = response.json()
+
+ return UploadResponse(path=path, Key=data.get("Key"))
+
+ async def upload(
+ self,
+ path: str,
+ file: Union[BufferedReader, bytes, FileIO, str, Path],
+ file_options: Optional[FileOptions] = None,
+ ) -> UploadResponse:
+ """
+ Uploads a file to an existing bucket.
+
+ Parameters
+ ----------
+ path
+ The relative file path including the bucket ID. Should be of the format `bucket/folder/subfolder/filename.png`.
+ The bucket must already exist before attempting to upload.
+ file
+            The file object to be stored in the bucket, or an async generator of chunks.
+ file_options
+ HTTP headers.
+ """
+ return await self._upload_or_update("POST", path, file, file_options)
+
+ async def update(
+ self,
+ path: str,
+ file: Union[BufferedReader, bytes, FileIO, str, Path],
+ file_options: Optional[FileOptions] = None,
+ ) -> UploadResponse:
+ return await self._upload_or_update("PUT", path, file, file_options)
+
+ def _get_final_path(self, path: str) -> str:
+ return f"{self.id}/{path}"
+
+
+@dataclass(repr=False)
+class AsyncBucket(BaseBucket):
+ """Represents a storage bucket."""
+
+
+@dataclass
+class AsyncBucketProxy(AsyncBucketActionsMixin):
+ """A bucket proxy, this contains the minimum required fields to query the File API."""
+
+ id: str
+ _client: AsyncClient = field(repr=False)
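
A sketch of the file API above, covering upload, signed URLs, and download. The endpoint, key, bucket id, and object path are all placeholders.

    import asyncio

    from storage3 import create_client

    async def main() -> None:
        storage = create_client(
            "https://<project>.supabase.co/storage/v1",   # placeholder URL
            {"Authorization": "Bearer <service-key>"},    # placeholder key
            is_async=True,
        )
        bucket = storage.from_("avatars")

        # Upload raw bytes; file_options keys mirror the FileOptions TypedDict.
        await bucket.upload(
            "profiles/alice.png",
            b"<png bytes>",
            {"content-type": "image/png", "upsert": "true"},
        )

        # Signed URL valid for one hour, with a forced download filename.
        signed = await bucket.create_signed_url(
            "profiles/alice.png", 3600, {"download": "alice.png"}
        )
        print(signed["signedURL"])

        content = await bucket.download("profiles/alice.png")
        await storage.aclose()

    asyncio.run(main())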
diff --git a/.venv/lib/python3.12/site-packages/storage3/_sync/__init__.py b/.venv/lib/python3.12/site-packages/storage3/_sync/__init__.py
new file mode 100644
index 00000000..9eedb131
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/storage3/_sync/__init__.py
@@ -0,0 +1 @@
+from .client import SyncStorageClient as SyncStorageClient
diff --git a/.venv/lib/python3.12/site-packages/storage3/_sync/bucket.py b/.venv/lib/python3.12/site-packages/storage3/_sync/bucket.py
new file mode 100644
index 00000000..8700bf06
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/storage3/_sync/bucket.py
@@ -0,0 +1,120 @@
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from httpx import HTTPStatusError, Response
+
+from ..exceptions import StorageApiError
+from ..types import CreateOrUpdateBucketOptions, RequestMethod
+from ..utils import SyncClient
+from .file_api import SyncBucket
+
+__all__ = ["SyncStorageBucketAPI"]
+
+
+class SyncStorageBucketAPI:
+ """This class abstracts access to the endpoint to the Get, List, Empty, and Delete operations on a bucket"""
+
+ def __init__(self, session: SyncClient) -> None:
+ self._client = session
+
+ def _request(
+ self,
+ method: RequestMethod,
+ url: str,
+ json: Optional[dict[Any, Any]] = None,
+ ) -> Response:
+ try:
+ response = self._client.request(method, url, json=json)
+ response.raise_for_status()
+ except HTTPStatusError as exc:
+ resp = exc.response.json()
+ raise StorageApiError(resp["message"], resp["error"], resp["statusCode"])
+
+ return response
+
+ def list_buckets(self) -> list[SyncBucket]:
+ """Retrieves the details of all storage buckets within an existing product."""
+ # if the request doesn't error, it is assured to return a list
+ res = self._request("GET", "/bucket")
+ return [SyncBucket(**bucket) for bucket in res.json()]
+
+ def get_bucket(self, id: str) -> SyncBucket:
+ """Retrieves the details of an existing storage bucket.
+
+ Parameters
+ ----------
+ id
+ The unique identifier of the bucket you would like to retrieve.
+ """
+ res = self._request("GET", f"/bucket/{id}")
+ json = res.json()
+ return SyncBucket(**json)
+
+ def create_bucket(
+ self,
+ id: str,
+ name: Optional[str] = None,
+ options: Optional[CreateOrUpdateBucketOptions] = None,
+ ) -> dict[str, str]:
+ """Creates a new storage bucket.
+
+ Parameters
+ ----------
+ id
+ A unique identifier for the bucket you are creating.
+ name
+ A name for the bucket you are creating. If not passed, the id is used as the name as well.
+ options
+ Extra options to send while creating the bucket. Valid options are `public`, `file_size_limit` and
+ `allowed_mime_types`.
+ """
+ json: dict[str, Any] = {"id": id, "name": name or id}
+ if options:
+ json.update(**options)
+ res = self._request(
+ "POST",
+ "/bucket",
+ json=json,
+ )
+ return res.json()
+
+ def update_bucket(
+ self, id: str, options: CreateOrUpdateBucketOptions
+ ) -> dict[str, str]:
+ """Update a storage bucket.
+
+ Parameters
+ ----------
+ id
+ The unique identifier of the bucket you would like to update.
+ options
+ The properties you want to update. Valid options are `public`, `file_size_limit` and
+ `allowed_mime_types`.
+ """
+ json = {"id": id, "name": id, **options}
+ res = self._request("PUT", f"/bucket/{id}", json=json)
+ return res.json()
+
+ def empty_bucket(self, id: str) -> dict[str, str]:
+ """Removes all objects inside a single bucket.
+
+ Parameters
+ ----------
+ id
+ The unique identifier of the bucket you would like to empty.
+ """
+ res = self._request("POST", f"/bucket/{id}/empty", json={})
+ return res.json()
+
+ def delete_bucket(self, id: str) -> dict[str, str]:
+ """Deletes an existing bucket. Note that you cannot delete buckets with existing objects inside. You must first
+ `empty()` the bucket.
+
+ Parameters
+ ----------
+ id
+ The unique identifier of the bucket you would like to delete.
+ """
+ res = self._request("DELETE", f"/bucket/{id}", json={})
+ return res.json()
diff --git a/.venv/lib/python3.12/site-packages/storage3/_sync/client.py b/.venv/lib/python3.12/site-packages/storage3/_sync/client.py
new file mode 100644
index 00000000..d2a2e299
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/storage3/_sync/client.py
@@ -0,0 +1,70 @@
+from __future__ import annotations
+
+from typing import Optional
+
+from storage3.constants import DEFAULT_TIMEOUT
+
+from ..utils import SyncClient
+from ..version import __version__
+from .bucket import SyncStorageBucketAPI
+from .file_api import SyncBucketProxy
+
+__all__ = [
+ "SyncStorageClient",
+]
+
+
+class SyncStorageClient(SyncStorageBucketAPI):
+ """Manage storage buckets and files."""
+
+ def __init__(
+ self,
+ url: str,
+ headers: dict[str, str],
+ timeout: int = DEFAULT_TIMEOUT,
+ verify: bool = True,
+ proxy: Optional[str] = None,
+ ) -> None:
+ headers = {
+ "User-Agent": f"supabase-py/storage3 v{__version__}",
+ **headers,
+ }
+ self.session = self._create_session(url, headers, timeout, verify, proxy)
+ super().__init__(self.session)
+
+ def _create_session(
+ self,
+ base_url: str,
+ headers: dict[str, str],
+ timeout: int,
+ verify: bool = True,
+ proxy: Optional[str] = None,
+ ) -> SyncClient:
+ return SyncClient(
+ base_url=base_url,
+ headers=headers,
+ timeout=timeout,
+ proxy=proxy,
+ verify=bool(verify),
+ follow_redirects=True,
+ http2=True,
+ )
+
+ def __enter__(self) -> SyncStorageClient:
+ return self
+
+ def __exit__(self, exc_type, exc, tb) -> None:
+ self.aclose()
+
+ def aclose(self) -> None:
+ self.session.aclose()
+
+ def from_(self, id: str) -> SyncBucketProxy:
+ """Run a storage file operation.
+
+ Parameters
+ ----------
+ id
+ The unique identifier of the bucket
+ """
+ return SyncBucketProxy(id, self._client)
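
The sync client mirrors the async one, including the context-manager protocol (`__enter__`/`__exit__` above). A sketch with placeholder credentials:

    from storage3 import create_client

    with create_client(
        "https://<project>.supabase.co/storage/v1",   # placeholder URL
        {"Authorization": "Bearer <service-key>"},    # placeholder key
        is_async=False,
    ) as storage:
        bucket = storage.from_("avatars")
        print(bucket.list())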
diff --git a/.venv/lib/python3.12/site-packages/storage3/_sync/file_api.py b/.venv/lib/python3.12/site-packages/storage3/_sync/file_api.py
new file mode 100644
index 00000000..5ee5e9f6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/storage3/_sync/file_api.py
@@ -0,0 +1,559 @@
+from __future__ import annotations
+
+import base64
+import json
+import urllib.parse
+from dataclasses import dataclass, field
+from io import BufferedReader, FileIO
+from pathlib import Path
+from typing import Any, Literal, Optional, Union, cast
+
+from httpx import HTTPStatusError, Response
+
+from ..constants import DEFAULT_FILE_OPTIONS, DEFAULT_SEARCH_OPTIONS
+from ..exceptions import StorageApiError
+from ..types import (
+ BaseBucket,
+ CreateSignedUrlResponse,
+ CreateSignedURLsOptions,
+ DownloadOptions,
+ FileOptions,
+ ListBucketFilesOptions,
+ RequestMethod,
+ SignedUploadURL,
+ SignedUrlResponse,
+ UploadData,
+ UploadResponse,
+ URLOptions,
+)
+from ..utils import StorageException, SyncClient
+
+__all__ = ["SyncBucket"]
+
+
+class SyncBucketActionsMixin:
+ """Functions needed to access the file API."""
+
+ id: str
+ _client: SyncClient
+
+ def _request(
+ self,
+ method: RequestMethod,
+ url: str,
+ headers: Optional[dict[str, Any]] = None,
+ json: Optional[dict[Any, Any]] = None,
+ files: Optional[Any] = None,
+ **kwargs: Any,
+ ) -> Response:
+ try:
+ response = self._client.request(
+ method, url, headers=headers or {}, json=json, files=files, **kwargs
+ )
+ response.raise_for_status()
+ except HTTPStatusError as exc:
+ resp = exc.response.json()
+ raise StorageApiError(resp["message"], resp["error"], resp["statusCode"])
+
+ # close the resource before returning the response
+ if files and "file" in files and isinstance(files["file"][1], BufferedReader):
+ files["file"][1].close()
+
+ return response
+
+ def create_signed_upload_url(self, path: str) -> SignedUploadURL:
+ """
+ Creates a signed upload URL.
+
+ Parameters
+ ----------
+ path
+ The file path, including the file name. For example `folder/image.png`.
+ """
+ _path = self._get_final_path(path)
+ response = self._request("POST", f"/object/upload/sign/{_path}")
+ data = response.json()
+ full_url: urllib.parse.ParseResult = urllib.parse.urlparse(
+ str(self._client.base_url) + cast(str, data["url"]).lstrip("/")
+ )
+ query_params = urllib.parse.parse_qs(full_url.query)
+ if not query_params.get("token"):
+ raise StorageException("No token sent by the API")
+ return {
+ "signed_url": full_url.geturl(),
+ "signedUrl": full_url.geturl(),
+ "token": query_params["token"][0],
+ "path": path,
+ }
+
+ def upload_to_signed_url(
+ self,
+ path: str,
+ token: str,
+ file: Union[BufferedReader, bytes, FileIO, str, Path],
+ file_options: Optional[FileOptions] = None,
+ ) -> UploadResponse:
+ """
+        Upload a file with a token generated from :meth:`.create_signed_upload_url`
+
+ Parameters
+ ----------
+ path
+ The file path, including the file name
+ token
+            The token generated from :meth:`.create_signed_upload_url`
+ file
+ The file contents or a file-like object to upload
+ file_options
+ Additional options for the uploaded file
+ """
+ _path = self._get_final_path(path)
+ _url = urllib.parse.urlparse(f"/object/upload/sign/{_path}")
+ query_params = urllib.parse.urlencode({"token": token})
+ final_url = f"{_url.geturl()}?{query_params}"
+
+ if file_options is None:
+ file_options = {}
+
+ cache_control = file_options.get("cache-control")
+ # cacheControl is also passed as form data
+ # https://github.com/supabase/storage-js/blob/fa44be8156295ba6320ffeff96bdf91016536a46/src/packages/StorageFileApi.ts#L89
+ _data = {}
+ if cache_control:
+ file_options["cache-control"] = f"max-age={cache_control}"
+ _data = {"cacheControl": cache_control}
+ headers = {
+ **self._client.headers,
+ **DEFAULT_FILE_OPTIONS,
+ **file_options,
+ }
+ filename = path.rsplit("/", maxsplit=1)[-1]
+
+ if (
+ isinstance(file, BufferedReader)
+ or isinstance(file, bytes)
+ or isinstance(file, FileIO)
+ ):
+ # bytes or byte-stream-like object received
+ _file = {"file": (filename, file, headers.pop("content-type"))}
+ else:
+            # str or pathlib.Path received
+ _file = {
+ "file": (
+ filename,
+ open(file, "rb"),
+ headers.pop("content-type"),
+ )
+ }
+ response = self._request(
+ "PUT", final_url, files=_file, headers=headers, data=_data
+ )
+ data: UploadData = response.json()
+
+ return UploadResponse(path=path, Key=data.get("Key"))
+
+ def create_signed_url(
+ self, path: str, expires_in: int, options: URLOptions = {}
+ ) -> SignedUrlResponse:
+ """
+ Parameters
+ ----------
+ path
+ file path to be downloaded, including the current file name.
+ expires_in
+ number of seconds until the signed URL expires.
+ options
+ options to be passed for downloading or transforming the file.
+ """
+ json = {"expiresIn": str(expires_in)}
+ download_query = ""
+ if options.get("download"):
+ json.update({"download": options["download"]})
+
+ download_query = (
+ "&download="
+ if options.get("download") is True
+ else f"&download={options.get('download')}"
+ )
+ if options.get("transform"):
+ json.update({"transform": options["transform"]})
+
+ path = self._get_final_path(path)
+ response = self._request(
+ "POST",
+ f"/object/sign/{path}",
+ json=json,
+ )
+ data = response.json()
+
+ # Prepare URL
+ url = urllib.parse.urlparse(data["signedURL"])
+ url = urllib.parse.quote(url.path) + f"?{url.query}"
+
+ signedURL = (
+ f"{self._client.base_url}{cast(str, url).lstrip('/')}{download_query}"
+ )
+ data: SignedUrlResponse = {"signedURL": signedURL, "signedUrl": signedURL}
+ return data
+
+ def create_signed_urls(
+ self, paths: list[str], expires_in: int, options: CreateSignedURLsOptions = {}
+ ) -> list[CreateSignedUrlResponse]:
+ """
+ Parameters
+ ----------
+        paths
+            file paths to be downloaded, including the current file names.
+ expires_in
+ number of seconds until the signed URL expires.
+ options
+ options to be passed for downloading the file.
+ """
+ json = {"paths": paths, "expiresIn": str(expires_in)}
+ download_query = ""
+ if options.get("download"):
+ json.update({"download": options.get("download")})
+
+ download_query = (
+ "&download="
+ if options.get("download") is True
+ else f"&download={options.get('download')}"
+ )
+
+ response = self._request(
+ "POST",
+ f"/object/sign/{self.id}",
+ json=json,
+ )
+ data = response.json()
+ signed_urls = []
+ for item in data:
+
+ # Prepare URL
+ url = urllib.parse.urlparse(item["signedURL"])
+ url = urllib.parse.quote(url.path) + f"?{url.query}"
+
+ signedURL = (
+ f"{self._client.base_url}{cast(str, url).lstrip('/')}{download_query}"
+ )
+ signed_item: CreateSignedUrlResponse = {
+ "error": item["error"],
+ "path": item["path"],
+ "signedURL": signedURL,
+ "signedUrl": signedURL,
+ }
+ signed_urls.append(signed_item)
+ return signed_urls
+
+ def get_public_url(self, path: str, options: URLOptions = {}) -> str:
+ """
+ Parameters
+ ----------
+ path
+ file path, including the path and file name. For example `folder/image.png`.
+ """
+ _query_string = []
+ download_query = ""
+ if options.get("download"):
+ download_query = (
+ "&download="
+ if options.get("download") is True
+ else f"&download={options.get('download')}"
+ )
+
+ if download_query:
+ _query_string.append(download_query)
+
+ render_path = "render/image" if options.get("transform") else "object"
+ transformation_query = (
+ urllib.parse.urlencode(options.get("transform"))
+ if options.get("transform")
+ else None
+ )
+
+ if transformation_query:
+ _query_string.append(transformation_query)
+
+ query_string = "&".join(_query_string)
+ query_string = f"?{query_string}"
+ _path = self._get_final_path(path)
+ return f"{self._client.base_url}{render_path}/public/{_path}{query_string}"
+
+ def move(self, from_path: str, to_path: str) -> dict[str, str]:
+ """
+ Moves an existing file, optionally renaming it at the same time.
+
+ Parameters
+ ----------
+ from_path
+ The original file path, including the current file name. For example `folder/image.png`.
+ to_path
+ The new file path, including the new file name. For example `folder/image-copy.png`.
+ """
+ res = self._request(
+ "POST",
+ "/object/move",
+ json={
+ "bucketId": self.id,
+ "sourceKey": from_path,
+ "destinationKey": to_path,
+ },
+ )
+ return res.json()
+
+ def copy(self, from_path: str, to_path: str) -> dict[str, str]:
+ """
+ Copies an existing file to a new path in the same bucket.
+
+ Parameters
+ ----------
+ from_path
+ The original file path, including the current file name. For example `folder/image.png`.
+ to_path
+ The new file path, including the new file name. For example `folder/image-copy.png`.
+ """
+ res = self._request(
+ "POST",
+ "/object/copy",
+ json={
+ "bucketId": self.id,
+ "sourceKey": from_path,
+ "destinationKey": to_path,
+ },
+ )
+ return res.json()
+
+ def remove(self, paths: list) -> list[dict[str, Any]]:
+ """
+ Deletes files within the same bucket
+
+ Parameters
+ ----------
+ paths
+            A list of file paths to be deleted, including the path and file name. For example [`folder/image.png`].
+ """
+ response = self._request(
+ "DELETE",
+ f"/object/{self.id}",
+ json={"prefixes": paths},
+ )
+ return response.json()
+
+ def info(
+ self,
+ path: str,
+ ) -> list[dict[str, str]]:
+ """
+ Lists info for a particular file.
+
+ Parameters
+ ----------
+ path
+ The path to the file.
+ """
+ response = self._request(
+ "GET",
+ f"/object/info/{self.id}/{path}",
+ )
+ return response.json()
+
+ def exists(
+ self,
+ path: str,
+ ) -> bool:
+ """
+ Returns True if the file exists, False otherwise.
+
+ Parameters
+ ----------
+ path
+ The path to the file.
+ """
+ try:
+ response = self._request(
+ "HEAD",
+ f"/object/{self.id}/{path}",
+ )
+ return response.status_code == 200
+        except json.JSONDecodeError:  # a failed HEAD response has no JSON error body to parse
+ return False
+
+ def list(
+ self,
+ path: Optional[str] = None,
+ options: Optional[ListBucketFilesOptions] = None,
+ ) -> list[dict[str, str]]:
+ """
+ Lists all the files within a bucket.
+
+ Parameters
+ ----------
+ path
+ The folder path.
+ options
+ Search options, including `limit`, `offset`, `sortBy` and `search`.
+ """
+ extra_options = options or {}
+ extra_headers = {"Content-Type": "application/json"}
+ body = {
+ **DEFAULT_SEARCH_OPTIONS,
+ **extra_options,
+ "prefix": path or "",
+ }
+ response = self._request(
+ "POST",
+ f"/object/list/{self.id}",
+ json=body,
+ headers=extra_headers,
+ )
+ return response.json()
+
+ def download(self, path: str, options: DownloadOptions = {}) -> bytes:
+ """
+ Downloads a file.
+
+ Parameters
+ ----------
+ path
+ The file path to be downloaded, including the path and file name. For example `folder/image.png`.
+ """
+ render_path = (
+ "render/image/authenticated" if options.get("transform") else "object"
+ )
+ transformation_query = urllib.parse.urlencode(options.get("transform") or {})
+ query_string = f"?{transformation_query}" if transformation_query else ""
+
+ _path = self._get_final_path(path)
+ response = self._request(
+ "GET",
+ f"{render_path}/{_path}{query_string}",
+ )
+ return response.content
+
+ def _upload_or_update(
+ self,
+ method: Literal["POST", "PUT"],
+ path: str,
+ file: Union[BufferedReader, bytes, FileIO, str, Path],
+ file_options: Optional[FileOptions] = None,
+ ) -> UploadResponse:
+ """
+ Uploads a file to an existing bucket.
+
+ Parameters
+ ----------
+ path
+ The relative file path including the bucket ID. Should be of the format `bucket/folder/subfolder/filename.png`.
+ The bucket must already exist before attempting to upload.
+ file
+            The file object to be stored in the bucket, or an async generator of chunks.
+ file_options
+ HTTP headers.
+ """
+ if file_options is None:
+ file_options = {}
+ cache_control = file_options.pop("cache-control", None)
+ _data = {}
+
+ upsert = file_options.pop("upsert", None)
+ if upsert:
+ file_options.update({"x-upsert": upsert})
+
+ metadata = file_options.pop("metadata", None)
+ file_opts_headers = file_options.pop("headers", None)
+
+ headers = {
+ **self._client.headers,
+ **DEFAULT_FILE_OPTIONS,
+ **file_options,
+ }
+
+ if metadata:
+ metadata_str = json.dumps(metadata)
+ headers["x-metadata"] = base64.b64encode(metadata_str.encode())
+ _data.update({"metadata": metadata_str})
+
+ if file_opts_headers:
+ headers.update({**file_opts_headers})
+
+ # Only include x-upsert on a POST method
+ if method != "POST":
+ del headers["x-upsert"]
+
+ filename = path.rsplit("/", maxsplit=1)[-1]
+
+ if cache_control:
+ headers["cache-control"] = f"max-age={cache_control}"
+ _data.update({"cacheControl": cache_control})
+
+ if (
+ isinstance(file, BufferedReader)
+ or isinstance(file, bytes)
+ or isinstance(file, FileIO)
+ ):
+ # bytes or byte-stream-like object received
+ files = {"file": (filename, file, headers.pop("content-type"))}
+ else:
+            # str or pathlib.Path received
+ files = {
+ "file": (
+ filename,
+ open(file, "rb"),
+ headers.pop("content-type"),
+ )
+ }
+
+ _path = self._get_final_path(path)
+
+ response = self._request(
+ method, f"/object/{_path}", files=files, headers=headers, data=_data
+ )
+
+ data: UploadData = response.json()
+
+ return UploadResponse(path=path, Key=data.get("Key"))
+
+ def upload(
+ self,
+ path: str,
+ file: Union[BufferedReader, bytes, FileIO, str, Path],
+ file_options: Optional[FileOptions] = None,
+ ) -> UploadResponse:
+ """
+ Uploads a file to an existing bucket.
+
+ Parameters
+ ----------
+ path
+ The relative file path including the bucket ID. Should be of the format `bucket/folder/subfolder/filename.png`.
+ The bucket must already exist before attempting to upload.
+ file
+            The file object to be stored in the bucket, or an async generator of chunks.
+ file_options
+ HTTP headers.
+ """
+ return self._upload_or_update("POST", path, file, file_options)
+
+ def update(
+ self,
+ path: str,
+ file: Union[BufferedReader, bytes, FileIO, str, Path],
+ file_options: Optional[FileOptions] = None,
+ ) -> UploadResponse:
+ return self._upload_or_update("PUT", path, file, file_options)
+
+ def _get_final_path(self, path: str) -> str:
+ return f"{self.id}/{path}"
+
+
+@dataclass(repr=False)
+class SyncBucket(BaseBucket):
+ """Represents a storage bucket."""
+
+
+@dataclass
+class SyncBucketProxy(SyncBucketActionsMixin):
+ """A bucket proxy, this contains the minimum required fields to query the File API."""
+
+ id: str
+ _client: SyncClient = field(repr=False)
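
The two-step signed-upload flow (mint a token, then upload against it) in sketch form; all endpoint values and paths are placeholders.

    from storage3 import create_client

    storage = create_client(
        "https://<project>.supabase.co/storage/v1",   # placeholder URL
        {"Authorization": "Bearer <service-key>"},    # placeholder key
        is_async=False,
    )
    bucket = storage.from_("avatars")

    # Step 1: mint a signed upload URL; the response carries the token and path.
    signed = bucket.create_signed_upload_url("profiles/bob.png")

    # Step 2: upload the payload against that token.
    bucket.upload_to_signed_url(
        signed["path"],
        signed["token"],
        b"<png bytes>",
        {"content-type": "image/png"},
    )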
diff --git a/.venv/lib/python3.12/site-packages/storage3/constants.py b/.venv/lib/python3.12/site-packages/storage3/constants.py
new file mode 100644
index 00000000..7bfc2f32
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/storage3/constants.py
@@ -0,0 +1,15 @@
+DEFAULT_SEARCH_OPTIONS = {
+ "limit": 100,
+ "offset": 0,
+ "sortBy": {
+ "column": "name",
+ "order": "asc",
+ },
+}
+DEFAULT_FILE_OPTIONS = {
+ "cache-control": "3600",
+ "content-type": "text/plain;charset=UTF-8",
+ "x-upsert": "false",
+}
+
+DEFAULT_TIMEOUT = 20
diff --git a/.venv/lib/python3.12/site-packages/storage3/exceptions.py b/.venv/lib/python3.12/site-packages/storage3/exceptions.py
new file mode 100644
index 00000000..098b9d11
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/storage3/exceptions.py
@@ -0,0 +1,33 @@
+from typing import TypedDict
+
+from .utils import StorageException
+
+
+class StorageApiErrorDict(TypedDict):
+    name: str
+    code: str
+    message: str
+    status: int
+
+
+class StorageApiError(StorageException):
+ """Error raised when an operation on the storage API fails."""
+
+ def __init__(self, message: str, code: str, status: int) -> None:
+ error_message = "{{'statusCode': {}, 'error': {}, 'message': {}}}".format(
+ status,
+ code,
+ message,
+ )
+ super().__init__(error_message)
+ self.name = "StorageApiError"
+ self.message = message
+ self.code = code
+ self.status = status
+
+ def to_dict(self) -> StorageApiErrorDict:
+ return {
+ "name": self.name,
+ "code": self.code,
+ "message": self.message,
+ "status": self.status,
+ }
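
A sketch of handling the error type above; the endpoint and key are placeholders, and the bucket id is deliberately nonexistent to trigger the error.

    from storage3 import create_client
    from storage3.exceptions import StorageApiError

    storage = create_client(
        "https://<project>.supabase.co/storage/v1",   # placeholder URL
        {"Authorization": "Bearer <service-key>"},    # placeholder key
        is_async=False,
    )
    try:
        storage.get_bucket("does-not-exist")
    except StorageApiError as exc:
        # Structured view of the API error payload.
        print(exc.to_dict())  # {'name': ..., 'code': ..., 'message': ..., 'status': ...}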
diff --git a/.venv/lib/python3.12/site-packages/storage3/types.py b/.venv/lib/python3.12/site-packages/storage3/types.py
new file mode 100644
index 00000000..56ec998c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/storage3/types.py
@@ -0,0 +1,121 @@
+from __future__ import annotations
+
+from dataclasses import asdict, dataclass
+from datetime import datetime
+from typing import Any, Dict, Literal, Optional, TypedDict, Union
+
+import dateutil.parser
+
+RequestMethod = Literal["GET", "POST", "DELETE", "PUT", "HEAD"]
+
+
+@dataclass
+class BaseBucket:
+ """Represents a file storage bucket."""
+
+ id: str
+ name: str
+ owner: str
+ public: bool
+ created_at: datetime
+ updated_at: datetime
+ file_size_limit: Optional[int]
+ allowed_mime_types: Optional[list[str]]
+
+ def __post_init__(self) -> None:
+ # created_at and updated_at are returned by the API as ISO timestamps
+ # so we convert them to datetime objects
+ self.created_at = dateutil.parser.isoparse(self.created_at) # type: ignore
+ self.updated_at = dateutil.parser.isoparse(self.updated_at) # type: ignore
+
+
+# used in bucket.list method's option parameter
+class _sortByType(TypedDict, total=False):
+ column: str
+ order: Literal["asc", "desc"]
+
+
+class SignedUploadURL(TypedDict):
+ signed_url: str
+ signedUrl: str
+ token: str
+ path: str
+
+
+class CreateOrUpdateBucketOptions(TypedDict, total=False):
+ public: bool
+ file_size_limit: int
+ allowed_mime_types: list[str]
+
+
+class ListBucketFilesOptions(TypedDict, total=False):
+ limit: int
+ offset: int
+ sortBy: _sortByType
+ search: str
+
+
+class TransformOptions(TypedDict, total=False):
+ height: int
+ width: int
+ resize: Literal["cover", "contain", "fill"]
+ format: Literal["origin", "avif"]
+ quality: int
+
+
+class URLOptions(TypedDict, total=False):
+ download: Union[str, bool]
+ transform: TransformOptions
+
+
+class CreateSignedURLsOptions(TypedDict, total=False):
+ download: Union[str, bool]
+
+
+class DownloadOptions(TypedDict, total=False):
+ transform: TransformOptions
+
+
+FileOptions = TypedDict(
+ "FileOptions",
+ {
+ "cache-control": str,
+ "content-type": str,
+ "x-upsert": str,
+ "upsert": str,
+ "metadata": Dict[str, Any],
+ "headers": Dict[str, str],
+ },
+ total=False,
+)
+
+
+class UploadData(TypedDict, total=False):
+ Id: str
+ Key: str
+
+
+@dataclass
+class UploadResponse:
+ path: str
+ full_path: str
+ fullPath: str
+
+ def __init__(self, path, Key):
+ self.path = path
+ self.full_path = Key
+ self.fullPath = Key
+
+ dict = asdict
+
+
+class SignedUrlResponse(TypedDict):
+ signedURL: str
+ signedUrl: str
+
+
+class CreateSignedUrlResponse(TypedDict):
+ error: Optional[str]
+ path: str
+ signedURL: str
+ signedUrl: str
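
A sketch of using the option TypedDicts above to build a public transform URL; the endpoint, key, bucket, and path are placeholders.

    from storage3 import create_client
    from storage3.types import TransformOptions, URLOptions

    transform: TransformOptions = {"width": 200, "height": 200, "resize": "cover"}
    options: URLOptions = {"download": True, "transform": transform}

    storage = create_client(
        "https://<project>.supabase.co/storage/v1",   # placeholder URL
        {"Authorization": "Bearer <service-key>"},    # placeholder key
        is_async=False,
    )
    # A public render/image URL with the transform encoded as query parameters.
    url = storage.from_("avatars").get_public_url("profiles/alice.png", options)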
diff --git a/.venv/lib/python3.12/site-packages/storage3/utils.py b/.venv/lib/python3.12/site-packages/storage3/utils.py
new file mode 100644
index 00000000..9689fc8e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/storage3/utils.py
@@ -0,0 +1,11 @@
+from httpx import AsyncClient as AsyncClient # noqa: F401
+from httpx import Client as BaseClient
+
+
+class SyncClient(BaseClient):
+ def aclose(self) -> None:
+ self.close()
+
+
+class StorageException(Exception):
+ """Error raised when an operation on the storage API fails."""
diff --git a/.venv/lib/python3.12/site-packages/storage3/version.py b/.venv/lib/python3.12/site-packages/storage3/version.py
new file mode 100644
index 00000000..4bb32721
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/storage3/version.py
@@ -0,0 +1 @@
+__version__ = "0.11.3" # {x-release-please-version}