Diffstat (limited to '.venv/lib/python3.12/site-packages/postgrest')
-rw-r--r--  .venv/lib/python3.12/site-packages/postgrest/__init__.py  30
-rw-r--r--  .venv/lib/python3.12/site-packages/postgrest/_async/__init__.py  1
-rw-r--r--  .venv/lib/python3.12/site-packages/postgrest/_async/client.py  128
-rw-r--r--  .venv/lib/python3.12/site-packages/postgrest/_async/request_builder.py  415
-rw-r--r--  .venv/lib/python3.12/site-packages/postgrest/_sync/__init__.py  1
-rw-r--r--  .venv/lib/python3.12/site-packages/postgrest/_sync/client.py  128
-rw-r--r--  .venv/lib/python3.12/site-packages/postgrest/_sync/request_builder.py  415
-rw-r--r--  .venv/lib/python3.12/site-packages/postgrest/base_client.py  80
-rw-r--r--  .venv/lib/python3.12/site-packages/postgrest/base_request_builder.py  685
-rw-r--r--  .venv/lib/python3.12/site-packages/postgrest/constants.py  6
-rw-r--r--  .venv/lib/python3.12/site-packages/postgrest/deprecated_client.py  17
-rw-r--r--  .venv/lib/python3.12/site-packages/postgrest/deprecated_get_request_builder.py  14
-rw-r--r--  .venv/lib/python3.12/site-packages/postgrest/exceptions.py  50
-rw-r--r--  .venv/lib/python3.12/site-packages/postgrest/py.typed  0
-rw-r--r--  .venv/lib/python3.12/site-packages/postgrest/types.py  58
-rw-r--r--  .venv/lib/python3.12/site-packages/postgrest/utils.py  68
-rw-r--r--  .venv/lib/python3.12/site-packages/postgrest/version.py  1
17 files changed, 2097 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/postgrest/__init__.py b/.venv/lib/python3.12/site-packages/postgrest/__init__.py
new file mode 100644
index 00000000..f060e684
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/postgrest/__init__.py
@@ -0,0 +1,30 @@
+from __future__ import annotations
+
+from httpx import Timeout
+
+from ._async.client import AsyncPostgrestClient
+from ._async.request_builder import (
+    AsyncFilterRequestBuilder,
+    AsyncMaybeSingleRequestBuilder,
+    AsyncQueryRequestBuilder,
+    AsyncRequestBuilder,
+    AsyncRPCFilterRequestBuilder,
+    AsyncSelectRequestBuilder,
+    AsyncSingleRequestBuilder,
+)
+from ._sync.client import SyncPostgrestClient
+from ._sync.request_builder import (
+    SyncFilterRequestBuilder,
+    SyncMaybeSingleRequestBuilder,
+    SyncQueryRequestBuilder,
+    SyncRequestBuilder,
+    SyncRPCFilterRequestBuilder,
+    SyncSelectRequestBuilder,
+    SyncSingleRequestBuilder,
+)
+from .base_request_builder import APIResponse
+from .constants import DEFAULT_POSTGREST_CLIENT_HEADERS
+from .deprecated_client import Client, PostgrestClient
+from .deprecated_get_request_builder import GetRequestBuilder
+from .exceptions import APIError
+from .version import __version__
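The package root re-exports the async and sync clients along with the request-builder classes, APIResponse, and APIError, so application code can import everything from the top-level `postgrest` package. A minimal import sketch using only the names re-exported above:

    from postgrest import AsyncPostgrestClient, SyncPostgrestClient, APIError, APIResponse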
diff --git a/.venv/lib/python3.12/site-packages/postgrest/_async/__init__.py b/.venv/lib/python3.12/site-packages/postgrest/_async/__init__.py
new file mode 100644
index 00000000..9d48db4f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/postgrest/_async/__init__.py
@@ -0,0 +1 @@
+from __future__ import annotations
diff --git a/.venv/lib/python3.12/site-packages/postgrest/_async/client.py b/.venv/lib/python3.12/site-packages/postgrest/_async/client.py
new file mode 100644
index 00000000..b9d266dd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/postgrest/_async/client.py
@@ -0,0 +1,128 @@
+from __future__ import annotations
+
+from typing import Any, Dict, Optional, Union, cast
+
+from deprecation import deprecated
+from httpx import Headers, QueryParams, Timeout
+
+from ..base_client import BasePostgrestClient
+from ..constants import (
+    DEFAULT_POSTGREST_CLIENT_HEADERS,
+    DEFAULT_POSTGREST_CLIENT_TIMEOUT,
+)
+from ..types import CountMethod
+from ..utils import AsyncClient
+from ..version import __version__
+from .request_builder import AsyncRequestBuilder, AsyncRPCFilterRequestBuilder
+
+_TableT = Dict[str, Any]
+
+
+class AsyncPostgrestClient(BasePostgrestClient):
+    """PostgREST client."""
+
+    def __init__(
+        self,
+        base_url: str,
+        *,
+        schema: str = "public",
+        headers: Dict[str, str] = DEFAULT_POSTGREST_CLIENT_HEADERS,
+        timeout: Union[int, float, Timeout] = DEFAULT_POSTGREST_CLIENT_TIMEOUT,
+        verify: bool = True,
+        proxy: Optional[str] = None,
+    ) -> None:
+        BasePostgrestClient.__init__(
+            self,
+            base_url,
+            schema=schema,
+            headers=headers,
+            timeout=timeout,
+            verify=verify,
+            proxy=proxy,
+        )
+        self.session = cast(AsyncClient, self.session)
+
+    def create_session(
+        self,
+        base_url: str,
+        headers: Dict[str, str],
+        timeout: Union[int, float, Timeout],
+        verify: bool = True,
+        proxy: Optional[str] = None,
+    ) -> AsyncClient:
+        return AsyncClient(
+            base_url=base_url,
+            headers=headers,
+            timeout=timeout,
+            verify=verify,
+            proxy=proxy,
+            follow_redirects=True,
+            http2=True,
+        )
+
+    async def __aenter__(self) -> AsyncPostgrestClient:
+        return self
+
+    async def __aexit__(self, exc_type, exc, tb) -> None:
+        await self.aclose()
+
+    async def aclose(self) -> None:
+        """Close the underlying HTTP connections."""
+        await self.session.aclose()
+
+    def from_(self, table: str) -> AsyncRequestBuilder[_TableT]:
+        """Perform a table operation.
+
+        Args:
+            table: The name of the table
+        Returns:
+            :class:`AsyncRequestBuilder`
+        """
+        return AsyncRequestBuilder[_TableT](self.session, f"/{table}")
+
+    def table(self, table: str) -> AsyncRequestBuilder[_TableT]:
+        """Alias to :meth:`from_`."""
+        return self.from_(table)
+
+    @deprecated("0.2.0", "1.0.0", __version__, "Use self.from_() instead")
+    def from_table(self, table: str) -> AsyncRequestBuilder:
+        """Alias to :meth:`from_`."""
+        return self.from_(table)
+
+    def rpc(
+        self,
+        func: str,
+        params: dict,
+        count: Optional[CountMethod] = None,
+        head: bool = False,
+        get: bool = False,
+    ) -> AsyncRPCFilterRequestBuilder[Any]:
+        """Perform a stored procedure call.
+
+        Args:
+            func: The name of the remote procedure to run.
+            params: The parameters to be passed to the remote procedure.
+            count: The method to use to get the count of rows returned.
+            head: When set to `True`, `data` will not be returned. Useful if you only need the count.
+            get: When set to `True`, the function will be called with read-only access mode.
+        Returns:
+            :class:`AsyncRPCFilterRequestBuilder`
+        Example:
+            .. code-block:: python
+
+                await client.rpc("foobar", {"arg": "value"}).execute()
+
+        .. versionchanged:: 0.10.9
+            This method now returns a :class:`AsyncRPCFilterRequestBuilder`.
+        .. versionchanged:: 0.10.2
+            This method now returns a :class:`AsyncFilterRequestBuilder` which allows you to
+            filter on the RPC's resultset.
+        """
+        method = "HEAD" if head else "GET" if get else "POST"
+
+        headers = Headers({"Prefer": f"count={count}"}) if count else Headers()
+
+        # the params here are params to be sent to the RPC and not the queryparams!
+        return AsyncRPCFilterRequestBuilder[Any](
+            self.session, f"/rpc/{func}", method, headers, QueryParams(), json=params
+        )
diff --git a/.venv/lib/python3.12/site-packages/postgrest/_async/request_builder.py b/.venv/lib/python3.12/site-packages/postgrest/_async/request_builder.py
new file mode 100644
index 00000000..2892fa36
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/postgrest/_async/request_builder.py
@@ -0,0 +1,415 @@
+from __future__ import annotations
+
+from json import JSONDecodeError
+from typing import Any, Generic, Optional, TypeVar, Union
+
+from httpx import Headers, QueryParams
+from pydantic import ValidationError
+
+from ..base_request_builder import (
+    APIResponse,
+    BaseFilterRequestBuilder,
+    BaseRPCRequestBuilder,
+    BaseSelectRequestBuilder,
+    CountMethod,
+    SingleAPIResponse,
+    pre_delete,
+    pre_insert,
+    pre_select,
+    pre_update,
+    pre_upsert,
+)
+from ..exceptions import APIError, generate_default_error_message
+from ..types import ReturnMethod
+from ..utils import AsyncClient, get_origin_and_cast
+
+_ReturnT = TypeVar("_ReturnT")
+
+
+class AsyncQueryRequestBuilder(Generic[_ReturnT]):
+    def __init__(
+        self,
+        session: AsyncClient,
+        path: str,
+        http_method: str,
+        headers: Headers,
+        params: QueryParams,
+        json: dict,
+    ) -> None:
+        self.session = session
+        self.path = path
+        self.http_method = http_method
+        self.headers = headers
+        self.params = params
+        self.json = None if http_method in {"GET", "HEAD"} else json
+
+    async def execute(self) -> APIResponse[_ReturnT]:
+        """Execute the query.
+
+        .. tip::
+            This is the last method called, after the query is built.
+
+        Returns:
+            :class:`APIResponse`
+
+        Raises:
+            :class:`APIError` If the API raised an error.
+        """
+        r = await self.session.request(
+            self.http_method,
+            self.path,
+            json=self.json,
+            params=self.params,
+            headers=self.headers,
+        )
+        try:
+            if r.is_success:
+                if self.http_method != "HEAD":
+                    body = r.text
+                    if self.headers.get("Accept") == "text/csv":
+                        return body
+                    if self.headers.get(
+                        "Accept"
+                    ) and "application/vnd.pgrst.plan" in self.headers.get("Accept"):
+                        if "+json" not in self.headers.get("Accept"):
+                            return body
+                return APIResponse[_ReturnT].from_http_request_response(r)
+            else:
+                raise APIError(r.json())
+        except ValidationError as e:
+            raise APIError(r.json()) from e
+        except JSONDecodeError:
+            raise APIError(generate_default_error_message(r))
+
+
+class AsyncSingleRequestBuilder(Generic[_ReturnT]):
+    def __init__(
+        self,
+        session: AsyncClient,
+        path: str,
+        http_method: str,
+        headers: Headers,
+        params: QueryParams,
+        json: dict,
+    ) -> None:
+        self.session = session
+        self.path = path
+        self.http_method = http_method
+        self.headers = headers
+        self.params = params
+        self.json = json
+
+    async def execute(self) -> SingleAPIResponse[_ReturnT]:
+        """Execute the query.
+
+        .. tip::
+            This is the last method called, after the query is built.
+
+        Returns:
+            :class:`SingleAPIResponse`
+
+        Raises:
+            :class:`APIError` If the API raised an error.
+        """
+        r = await self.session.request(
+            self.http_method,
+            self.path,
+            json=self.json,
+            params=self.params,
+            headers=self.headers,
+        )
+        try:
+            if (
+                200 <= r.status_code <= 299
+            ):  # Response.ok from JS (https://developer.mozilla.org/en-US/docs/Web/API/Response/ok)
+                return SingleAPIResponse[_ReturnT].from_http_request_response(r)
+            else:
+                raise APIError(r.json())
+        except ValidationError as e:
+            raise APIError(r.json()) from e
+        except JSONDecodeError:
+            raise APIError(generate_default_error_message(r))
+
+
+class AsyncMaybeSingleRequestBuilder(AsyncSingleRequestBuilder[_ReturnT]):
+    async def execute(self) -> Optional[SingleAPIResponse[_ReturnT]]:
+        r = None
+        try:
+            r = await AsyncSingleRequestBuilder[_ReturnT].execute(self)
+        except APIError as e:
+            if e.details and "The result contains 0 rows" in e.details:
+                return None
+        if not r:
+            raise APIError(
+                {
+                    "message": "Missing response",
+                    "code": "204",
+                    "hint": "Please check traceback of the code",
+                    "details": "Postgrest couldn't retrieve response, please check traceback of the code. Please create an issue in `supabase-community/postgrest-py` if needed.",
+                }
+            )
+        return r
+
+
+# ignoring type checking as a workaround for https://github.com/python/mypy/issues/9319
+class AsyncFilterRequestBuilder(BaseFilterRequestBuilder[_ReturnT], AsyncQueryRequestBuilder[_ReturnT]):  # type: ignore
+    def __init__(
+        self,
+        session: AsyncClient,
+        path: str,
+        http_method: str,
+        headers: Headers,
+        params: QueryParams,
+        json: dict,
+    ) -> None:
+        get_origin_and_cast(BaseFilterRequestBuilder[_ReturnT]).__init__(
+            self, session, headers, params
+        )
+        get_origin_and_cast(AsyncQueryRequestBuilder[_ReturnT]).__init__(
+            self, session, path, http_method, headers, params, json
+        )
+
+
+# this exists for type-safety. see https://gist.github.com/anand2312/93d3abf401335fd3310d9e30112303bf
+class AsyncRPCFilterRequestBuilder(
+    BaseRPCRequestBuilder[_ReturnT], AsyncSingleRequestBuilder[_ReturnT]
+):
+    def __init__(
+        self,
+        session: AsyncClient,
+        path: str,
+        http_method: str,
+        headers: Headers,
+        params: QueryParams,
+        json: dict,
+    ) -> None:
+        get_origin_and_cast(BaseFilterRequestBuilder[_ReturnT]).__init__(
+            self, session, headers, params
+        )
+        get_origin_and_cast(AsyncSingleRequestBuilder[_ReturnT]).__init__(
+            self, session, path, http_method, headers, params, json
+        )
+
+
+# ignoring type checking as a workaround for https://github.com/python/mypy/issues/9319
+class AsyncSelectRequestBuilder(BaseSelectRequestBuilder[_ReturnT], AsyncQueryRequestBuilder[_ReturnT]):  # type: ignore
+    def __init__(
+        self,
+        session: AsyncClient,
+        path: str,
+        http_method: str,
+        headers: Headers,
+        params: QueryParams,
+        json: dict,
+    ) -> None:
+        get_origin_and_cast(BaseSelectRequestBuilder[_ReturnT]).__init__(
+            self, session, headers, params
+        )
+        get_origin_and_cast(AsyncQueryRequestBuilder[_ReturnT]).__init__(
+            self, session, path, http_method, headers, params, json
+        )
+
+    def single(self) -> AsyncSingleRequestBuilder[_ReturnT]:
+        """Specify that the query will only return a single row in response.
+
+        .. caution::
+            The API will raise an error if the query returned more than one row.
+        """
+        self.headers["Accept"] = "application/vnd.pgrst.object+json"
+        return AsyncSingleRequestBuilder[_ReturnT](
+            headers=self.headers,
+            http_method=self.http_method,
+            json=self.json,
+            params=self.params,
+            path=self.path,
+            session=self.session,  # type: ignore
+        )
+
+    def maybe_single(self) -> AsyncMaybeSingleRequestBuilder[_ReturnT]:
+        """Retrieves at most one row from the result. Result must be at most one row (e.g. using `eq` on a UNIQUE column), otherwise this will result in an error."""
+        self.headers["Accept"] = "application/vnd.pgrst.object+json"
+        return AsyncMaybeSingleRequestBuilder[_ReturnT](
+            headers=self.headers,
+            http_method=self.http_method,
+            json=self.json,
+            params=self.params,
+            path=self.path,
+            session=self.session,  # type: ignore
+        )
+
+    def text_search(
+        self, column: str, query: str, options: dict[str, Any] = {}
+    ) -> AsyncFilterRequestBuilder[_ReturnT]:
+        type_ = options.get("type")
+        type_part = ""
+        if type_ == "plain":
+            type_part = "pl"
+        elif type_ == "phrase":
+            type_part = "ph"
+        elif type_ == "web_search":
+            type_part = "w"
+        config_part = f"({options.get('config')})" if options.get("config") else ""
+        self.params = self.params.add(column, f"{type_part}fts{config_part}.{query}")
+
+        return AsyncQueryRequestBuilder[_ReturnT](
+            headers=self.headers,
+            http_method=self.http_method,
+            json=self.json,
+            params=self.params,
+            path=self.path,
+            session=self.session,  # type: ignore
+        )
+
+    def csv(self) -> AsyncSingleRequestBuilder[str]:
+        """Specify that the query must retrieve data as a single CSV string."""
+        self.headers["Accept"] = "text/csv"
+        return AsyncSingleRequestBuilder[str](
+            session=self.session,  # type: ignore
+            path=self.path,
+            http_method=self.http_method,
+            headers=self.headers,
+            params=self.params,
+            json=self.json,
+        )
+
+
+class AsyncRequestBuilder(Generic[_ReturnT]):
+    def __init__(self, session: AsyncClient, path: str) -> None:
+        self.session = session
+        self.path = path
+
+    def select(
+        self,
+        *columns: str,
+        count: Optional[CountMethod] = None,
+        head: Optional[bool] = None,
+    ) -> AsyncSelectRequestBuilder[_ReturnT]:
+        """Run a SELECT query.
+
+        Args:
+            *columns: The names of the columns to fetch.
+            count: The method to use to get the count of rows returned.
+        Returns:
+            :class:`AsyncSelectRequestBuilder`
+        """
+        method, params, headers, json = pre_select(*columns, count=count, head=head)
+        return AsyncSelectRequestBuilder[_ReturnT](
+            self.session, self.path, method, headers, params, json
+        )
+
+    def insert(
+        self,
+        json: Union[dict, list],
+        *,
+        count: Optional[CountMethod] = None,
+        returning: ReturnMethod = ReturnMethod.representation,
+        upsert: bool = False,
+        default_to_null: bool = True,
+    ) -> AsyncQueryRequestBuilder[_ReturnT]:
+        """Run an INSERT query.
+
+        Args:
+            json: The row to be inserted.
+            count: The method to use to get the count of rows returned.
+            returning: Either 'minimal' or 'representation'
+            upsert: Whether the query should be an upsert.
+            default_to_null: Make missing fields default to `null`.
+                Otherwise, use the default value for the column.
+                Only applies for bulk inserts.
+        Returns:
+            :class:`AsyncQueryRequestBuilder`
+        """
+        method, params, headers, json = pre_insert(
+            json,
+            count=count,
+            returning=returning,
+            upsert=upsert,
+            default_to_null=default_to_null,
+        )
+        return AsyncQueryRequestBuilder[_ReturnT](
+            self.session, self.path, method, headers, params, json
+        )
+
+    def upsert(
+        self,
+        json: Union[dict, list],
+        *,
+        count: Optional[CountMethod] = None,
+        returning: ReturnMethod = ReturnMethod.representation,
+        ignore_duplicates: bool = False,
+        on_conflict: str = "",
+        default_to_null: bool = True,
+    ) -> AsyncQueryRequestBuilder[_ReturnT]:
+        """Run an upsert (INSERT ... ON CONFLICT DO UPDATE) query.
+
+        Args:
+            json: The row(s) to be inserted or merged.
+            count: The method to use to get the count of rows returned.
+            returning: Either 'minimal' or 'representation'
+            ignore_duplicates: Whether duplicate rows should be ignored.
+            on_conflict: Comma-separated UNIQUE column(s) used to detect duplicates.
+            default_to_null: Make missing fields default to `null`. Otherwise, use the
+                default value for the column. This only applies when inserting new rows,
+                not when merging with existing rows (`ignore_duplicates=False`).
+                It also only applies to bulk upserts.
+        Returns:
+            :class:`AsyncQueryRequestBuilder`
+        """
+        method, params, headers, json = pre_upsert(
+            json,
+            count=count,
+            returning=returning,
+            ignore_duplicates=ignore_duplicates,
+            on_conflict=on_conflict,
+            default_to_null=default_to_null,
+        )
+        return AsyncQueryRequestBuilder[_ReturnT](
+            self.session, self.path, method, headers, params, json
+        )
+
+    def update(
+        self,
+        json: dict,
+        *,
+        count: Optional[CountMethod] = None,
+        returning: ReturnMethod = ReturnMethod.representation,
+    ) -> AsyncFilterRequestBuilder[_ReturnT]:
+        """Run an UPDATE query.
+
+        Args:
+            json: The updated fields.
+            count: The method to use to get the count of rows returned.
+            returning: Either 'minimal' or 'representation'
+        Returns:
+            :class:`AsyncFilterRequestBuilder`
+        """
+        method, params, headers, json = pre_update(
+            json,
+            count=count,
+            returning=returning,
+        )
+        return AsyncFilterRequestBuilder[_ReturnT](
+            self.session, self.path, method, headers, params, json
+        )
+
+    def delete(
+        self,
+        *,
+        count: Optional[CountMethod] = None,
+        returning: ReturnMethod = ReturnMethod.representation,
+    ) -> AsyncFilterRequestBuilder[_ReturnT]:
+        """Run a DELETE query.
+
+        Args:
+            count: The method to use to get the count of rows returned.
+            returning: Either 'minimal' or 'representation'
+        Returns:
+            :class:`AsyncFilterRequestBuilder`
+        """
+        method, params, headers, json = pre_delete(
+            count=count,
+            returning=returning,
+        )
+        return AsyncFilterRequestBuilder[_ReturnT](
+            self.session, self.path, method, headers, params, json
+        )
diff --git a/.venv/lib/python3.12/site-packages/postgrest/_sync/__init__.py b/.venv/lib/python3.12/site-packages/postgrest/_sync/__init__.py
new file mode 100644
index 00000000..9d48db4f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/postgrest/_sync/__init__.py
@@ -0,0 +1 @@
+from __future__ import annotations
diff --git a/.venv/lib/python3.12/site-packages/postgrest/_sync/client.py b/.venv/lib/python3.12/site-packages/postgrest/_sync/client.py
new file mode 100644
index 00000000..29b9aab4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/postgrest/_sync/client.py
@@ -0,0 +1,128 @@
+from __future__ import annotations
+
+from typing import Any, Dict, Optional, Union, cast
+
+from deprecation import deprecated
+from httpx import Headers, QueryParams, Timeout
+
+from ..base_client import BasePostgrestClient
+from ..constants import (
+    DEFAULT_POSTGREST_CLIENT_HEADERS,
+    DEFAULT_POSTGREST_CLIENT_TIMEOUT,
+)
+from ..types import CountMethod
+from ..utils import SyncClient
+from ..version import __version__
+from .request_builder import SyncRequestBuilder, SyncRPCFilterRequestBuilder
+
+_TableT = Dict[str, Any]
+
+
+class SyncPostgrestClient(BasePostgrestClient):
+    """PostgREST client."""
+
+    def __init__(
+        self,
+        base_url: str,
+        *,
+        schema: str = "public",
+        headers: Dict[str, str] = DEFAULT_POSTGREST_CLIENT_HEADERS,
+        timeout: Union[int, float, Timeout] = DEFAULT_POSTGREST_CLIENT_TIMEOUT,
+        verify: bool = True,
+        proxy: Optional[str] = None,
+    ) -> None:
+        BasePostgrestClient.__init__(
+            self,
+            base_url,
+            schema=schema,
+            headers=headers,
+            timeout=timeout,
+            verify=verify,
+            proxy=proxy,
+        )
+        self.session = cast(SyncClient, self.session)
+
+    def create_session(
+        self,
+        base_url: str,
+        headers: Dict[str, str],
+        timeout: Union[int, float, Timeout],
+        verify: bool = True,
+        proxy: Optional[str] = None,
+    ) -> SyncClient:
+        return SyncClient(
+            base_url=base_url,
+            headers=headers,
+            timeout=timeout,
+            verify=verify,
+            proxy=proxy,
+            follow_redirects=True,
+            http2=True,
+        )
+
+    def __enter__(self) -> SyncPostgrestClient:
+        return self
+
+    def __exit__(self, exc_type, exc, tb) -> None:
+        self.aclose()
+
+    def aclose(self) -> None:
+        """Close the underlying HTTP connections."""
+        self.session.aclose()
+
+    def from_(self, table: str) -> SyncRequestBuilder[_TableT]:
+        """Perform a table operation.
+
+        Args:
+            table: The name of the table
+        Returns:
+            :class:`SyncRequestBuilder`
+        """
+        return SyncRequestBuilder[_TableT](self.session, f"/{table}")
+
+    def table(self, table: str) -> SyncRequestBuilder[_TableT]:
+        """Alias to :meth:`from_`."""
+        return self.from_(table)
+
+    @deprecated("0.2.0", "1.0.0", __version__, "Use self.from_() instead")
+    def from_table(self, table: str) -> SyncRequestBuilder:
+        """Alias to :meth:`from_`."""
+        return self.from_(table)
+
+    def rpc(
+        self,
+        func: str,
+        params: dict,
+        count: Optional[CountMethod] = None,
+        head: bool = False,
+        get: bool = False,
+    ) -> SyncRPCFilterRequestBuilder[Any]:
+        """Perform a stored procedure call.
+
+        Args:
+            func: The name of the remote procedure to run.
+            params: The parameters to be passed to the remote procedure.
+            count: The method to use to get the count of rows returned.
+            head: When set to `True`, `data` will not be returned. Useful if you only need the count.
+            get: When set to `True`, the function will be called with read-only access mode.
+        Returns:
+            :class:`SyncRPCFilterRequestBuilder`
+        Example:
+            .. code-block:: python
+
+                client.rpc("foobar", {"arg": "value"}).execute()
+
+        .. versionchanged:: 0.10.9
+            This method now returns a :class:`SyncRPCFilterRequestBuilder`.
+        .. versionchanged:: 0.10.2
+            This method now returns a :class:`SyncFilterRequestBuilder` which allows you to
+            filter on the RPC's resultset.
+        """
+        method = "HEAD" if head else "GET" if get else "POST"
+
+        headers = Headers({"Prefer": f"count={count}"}) if count else Headers()
+
+        # the params here are params to be sent to the RPC and not the queryparams!
+        return SyncRPCFilterRequestBuilder[Any](
+            self.session, f"/rpc/{func}", method, headers, QueryParams(), json=params
+        )
diff --git a/.venv/lib/python3.12/site-packages/postgrest/_sync/request_builder.py b/.venv/lib/python3.12/site-packages/postgrest/_sync/request_builder.py
new file mode 100644
index 00000000..8b0eb160
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/postgrest/_sync/request_builder.py
@@ -0,0 +1,415 @@
+from __future__ import annotations
+
+from json import JSONDecodeError
+from typing import Any, Generic, Optional, TypeVar, Union
+
+from httpx import Headers, QueryParams
+from pydantic import ValidationError
+
+from ..base_request_builder import (
+    APIResponse,
+    BaseFilterRequestBuilder,
+    BaseRPCRequestBuilder,
+    BaseSelectRequestBuilder,
+    CountMethod,
+    SingleAPIResponse,
+    pre_delete,
+    pre_insert,
+    pre_select,
+    pre_update,
+    pre_upsert,
+)
+from ..exceptions import APIError, generate_default_error_message
+from ..types import ReturnMethod
+from ..utils import SyncClient, get_origin_and_cast
+
+_ReturnT = TypeVar("_ReturnT")
+
+
+class SyncQueryRequestBuilder(Generic[_ReturnT]):
+    def __init__(
+        self,
+        session: SyncClient,
+        path: str,
+        http_method: str,
+        headers: Headers,
+        params: QueryParams,
+        json: Union[dict, list],
+    ) -> None:
+        self.session = session
+        self.path = path
+        self.http_method = http_method
+        self.headers = headers
+        self.params = params
+        self.json = None if http_method in {"GET", "HEAD"} else json
+
+    def execute(self) -> APIResponse[_ReturnT]:
+        """Execute the query.
+
+        .. tip::
+            This is the last method called, after the query is built.
+
+        Returns:
+            :class:`APIResponse`
+
+        Raises:
+            :class:`APIError` If the API raised an error.
+        """
+        r = self.session.request(
+            self.http_method,
+            self.path,
+            json=self.json,
+            params=self.params,
+            headers=self.headers,
+        )
+        try:
+            if r.is_success:
+                if self.http_method != "HEAD":
+                    body = r.text
+                    if self.headers.get("Accept") == "text/csv":
+                        return body
+                    if self.headers.get(
+                        "Accept"
+                    ) and "application/vnd.pgrst.plan" in self.headers.get("Accept"):
+                        if "+json" not in self.headers.get("Accept"):
+                            return body
+                return APIResponse[_ReturnT].from_http_request_response(r)
+            else:
+                raise APIError(r.json())
+        except ValidationError as e:
+            raise APIError(r.json()) from e
+        except JSONDecodeError:
+            raise APIError(generate_default_error_message(r))
+
+
+class SyncSingleRequestBuilder(Generic[_ReturnT]):
+    def __init__(
+        self,
+        session: SyncClient,
+        path: str,
+        http_method: str,
+        headers: Headers,
+        params: QueryParams,
+        json: dict,
+    ) -> None:
+        self.session = session
+        self.path = path
+        self.http_method = http_method
+        self.headers = headers
+        self.params = params
+        self.json = json
+
+    def execute(self) -> SingleAPIResponse[_ReturnT]:
+        """Execute the query.
+
+        .. tip::
+            This is the last method called, after the query is built.
+
+        Returns:
+            :class:`SingleAPIResponse`
+
+        Raises:
+            :class:`APIError` If the API raised an error.
+        """
+        r = self.session.request(
+            self.http_method,
+            self.path,
+            json=self.json,
+            params=self.params,
+            headers=self.headers,
+        )
+        try:
+            if (
+                200 <= r.status_code <= 299
+            ):  # Response.ok from JS (https://developer.mozilla.org/en-US/docs/Web/API/Response/ok)
+                return SingleAPIResponse[_ReturnT].from_http_request_response(r)
+            else:
+                raise APIError(r.json())
+        except ValidationError as e:
+            raise APIError(r.json()) from e
+        except JSONDecodeError:
+            raise APIError(generate_default_error_message(r))
+
+
+class SyncMaybeSingleRequestBuilder(SyncSingleRequestBuilder[_ReturnT]):
+    def execute(self) -> Optional[SingleAPIResponse[_ReturnT]]:
+        r = None
+        try:
+            r = SyncSingleRequestBuilder[_ReturnT].execute(self)
+        except APIError as e:
+            if e.details and "The result contains 0 rows" in e.details:
+                return None
+        if not r:
+            raise APIError(
+                {
+                    "message": "Missing response",
+                    "code": "204",
+                    "hint": "Please check traceback of the code",
+                    "details": "Postgrest couldn't retrieve response, please check traceback of the code. Please create an issue in `supabase-community/postgrest-py` if needed.",
+                }
+            )
+        return r
+
+
+# ignoring type checking as a workaround for https://github.com/python/mypy/issues/9319
+class SyncFilterRequestBuilder(BaseFilterRequestBuilder[_ReturnT], SyncQueryRequestBuilder[_ReturnT]):  # type: ignore
+    def __init__(
+        self,
+        session: SyncClient,
+        path: str,
+        http_method: str,
+        headers: Headers,
+        params: QueryParams,
+        json: dict,
+    ) -> None:
+        get_origin_and_cast(BaseFilterRequestBuilder[_ReturnT]).__init__(
+            self, session, headers, params
+        )
+        get_origin_and_cast(SyncQueryRequestBuilder[_ReturnT]).__init__(
+            self, session, path, http_method, headers, params, json
+        )
+
+
+# this exists for type-safety. see https://gist.github.com/anand2312/93d3abf401335fd3310d9e30112303bf
+class SyncRPCFilterRequestBuilder(
+    BaseRPCRequestBuilder[_ReturnT], SyncSingleRequestBuilder[_ReturnT]
+):
+    def __init__(
+        self,
+        session: SyncClient,
+        path: str,
+        http_method: str,
+        headers: Headers,
+        params: QueryParams,
+        json: dict,
+    ) -> None:
+        get_origin_and_cast(BaseFilterRequestBuilder[_ReturnT]).__init__(
+            self, session, headers, params
+        )
+        get_origin_and_cast(SyncSingleRequestBuilder[_ReturnT]).__init__(
+            self, session, path, http_method, headers, params, json
+        )
+
+
+# ignoring type checking as a workaround for https://github.com/python/mypy/issues/9319
+class SyncSelectRequestBuilder(BaseSelectRequestBuilder[_ReturnT], SyncQueryRequestBuilder[_ReturnT]):  # type: ignore
+    def __init__(
+        self,
+        session: SyncClient,
+        path: str,
+        http_method: str,
+        headers: Headers,
+        params: QueryParams,
+        json: dict,
+    ) -> None:
+        get_origin_and_cast(BaseSelectRequestBuilder[_ReturnT]).__init__(
+            self, session, headers, params
+        )
+        get_origin_and_cast(SyncQueryRequestBuilder[_ReturnT]).__init__(
+            self, session, path, http_method, headers, params, json
+        )
+
+    def single(self) -> SyncSingleRequestBuilder[_ReturnT]:
+        """Specify that the query will only return a single row in response.
+
+        .. caution::
+            The API will raise an error if the query returned more than one row.
+        """
+        self.headers["Accept"] = "application/vnd.pgrst.object+json"
+        return SyncSingleRequestBuilder[_ReturnT](
+            headers=self.headers,
+            http_method=self.http_method,
+            json=self.json,
+            params=self.params,
+            path=self.path,
+            session=self.session,  # type: ignore
+        )
+
+    def maybe_single(self) -> SyncMaybeSingleRequestBuilder[_ReturnT]:
+        """Retrieves at most one row from the result. Result must be at most one row (e.g. using `eq` on a UNIQUE column), otherwise this will result in an error."""
+        self.headers["Accept"] = "application/vnd.pgrst.object+json"
+        return SyncMaybeSingleRequestBuilder[_ReturnT](
+            headers=self.headers,
+            http_method=self.http_method,
+            json=self.json,
+            params=self.params,
+            path=self.path,
+            session=self.session,  # type: ignore
+        )
+
+    def text_search(
+        self, column: str, query: str, options: dict[str, Any] = {}
+    ) -> SyncFilterRequestBuilder[_ReturnT]:
+        type_ = options.get("type")
+        type_part = ""
+        if type_ == "plain":
+            type_part = "pl"
+        elif type_ == "phrase":
+            type_part = "ph"
+        elif type_ == "web_search":
+            type_part = "w"
+        config_part = f"({options.get('config')})" if options.get("config") else ""
+        self.params = self.params.add(column, f"{type_part}fts{config_part}.{query}")
+
+        return SyncQueryRequestBuilder[_ReturnT](
+            headers=self.headers,
+            http_method=self.http_method,
+            json=self.json,
+            params=self.params,
+            path=self.path,
+            session=self.session,  # type: ignore
+        )
+
+    def csv(self) -> SyncSingleRequestBuilder[str]:
+        """Specify that the query must retrieve data as a single CSV string."""
+        self.headers["Accept"] = "text/csv"
+        return SyncSingleRequestBuilder[str](
+            session=self.session,  # type: ignore
+            path=self.path,
+            http_method=self.http_method,
+            headers=self.headers,
+            params=self.params,
+            json=self.json,
+        )
+
+
+class SyncRequestBuilder(Generic[_ReturnT]):
+    def __init__(self, session: SyncClient, path: str) -> None:
+        self.session = session
+        self.path = path
+
+    def select(
+        self,
+        *columns: str,
+        count: Optional[CountMethod] = None,
+        head: Optional[bool] = None,
+    ) -> SyncSelectRequestBuilder[_ReturnT]:
+        """Run a SELECT query.
+
+        Args:
+            *columns: The names of the columns to fetch.
+            count: The method to use to get the count of rows returned.
+        Returns:
+            :class:`SyncSelectRequestBuilder`
+        """
+        method, params, headers, json = pre_select(*columns, count=count, head=head)
+        return SyncSelectRequestBuilder[_ReturnT](
+            self.session, self.path, method, headers, params, json
+        )
+
+    def insert(
+        self,
+        json: Union[dict, list],
+        *,
+        count: Optional[CountMethod] = None,
+        returning: ReturnMethod = ReturnMethod.representation,
+        upsert: bool = False,
+        default_to_null: bool = True,
+    ) -> SyncQueryRequestBuilder[_ReturnT]:
+        """Run an INSERT query.
+
+        Args:
+            json: The row to be inserted.
+            count: The method to use to get the count of rows returned.
+            returning: Either 'minimal' or 'representation'
+            upsert: Whether the query should be an upsert.
+            default_to_null: Make missing fields default to `null`.
+                Otherwise, use the default value for the column.
+                Only applies for bulk inserts.
+        Returns:
+            :class:`SyncQueryRequestBuilder`
+        """
+        method, params, headers, json = pre_insert(
+            json,
+            count=count,
+            returning=returning,
+            upsert=upsert,
+            default_to_null=default_to_null,
+        )
+        return SyncQueryRequestBuilder[_ReturnT](
+            self.session, self.path, method, headers, params, json
+        )
+
+    def upsert(
+        self,
+        json: Union[dict, list],
+        *,
+        count: Optional[CountMethod] = None,
+        returning: ReturnMethod = ReturnMethod.representation,
+        ignore_duplicates: bool = False,
+        on_conflict: str = "",
+        default_to_null: bool = True,
+    ) -> SyncQueryRequestBuilder[_ReturnT]:
+        """Run an upsert (INSERT ... ON CONFLICT DO UPDATE) query.
+
+        Args:
+            json: The row(s) to be inserted or merged.
+            count: The method to use to get the count of rows returned.
+            returning: Either 'minimal' or 'representation'
+            ignore_duplicates: Whether duplicate rows should be ignored.
+            on_conflict: Comma-separated UNIQUE column(s) used to detect duplicates.
+            default_to_null: Make missing fields default to `null`. Otherwise, use the
+                default value for the column. This only applies when inserting new rows,
+                not when merging with existing rows (`ignore_duplicates=False`).
+                It also only applies to bulk upserts.
+        Returns:
+            :class:`SyncQueryRequestBuilder`
+        """
+        method, params, headers, json = pre_upsert(
+            json,
+            count=count,
+            returning=returning,
+            ignore_duplicates=ignore_duplicates,
+            on_conflict=on_conflict,
+            default_to_null=default_to_null,
+        )
+        return SyncQueryRequestBuilder[_ReturnT](
+            self.session, self.path, method, headers, params, json
+        )
+
+    def update(
+        self,
+        json: dict,
+        *,
+        count: Optional[CountMethod] = None,
+        returning: ReturnMethod = ReturnMethod.representation,
+    ) -> SyncFilterRequestBuilder[_ReturnT]:
+        """Run an UPDATE query.
+
+        Args:
+            json: The updated fields.
+            count: The method to use to get the count of rows returned.
+            returning: Either 'minimal' or 'representation'
+        Returns:
+            :class:`SyncFilterRequestBuilder`
+        """
+        method, params, headers, json = pre_update(
+            json,
+            count=count,
+            returning=returning,
+        )
+        return SyncFilterRequestBuilder[_ReturnT](
+            self.session, self.path, method, headers, params, json
+        )
+
+    def delete(
+        self,
+        *,
+        count: Optional[CountMethod] = None,
+        returning: ReturnMethod = ReturnMethod.representation,
+    ) -> SyncFilterRequestBuilder[_ReturnT]:
+        """Run a DELETE query.
+
+        Args:
+            count: The method to use to get the count of rows returned.
+            returning: Either 'minimal' or 'representation'
+        Returns:
+            :class:`SyncFilterRequestBuilder`
+        """
+        method, params, headers, json = pre_delete(
+            count=count,
+            returning=returning,
+        )
+        return SyncFilterRequestBuilder[_ReturnT](
+            self.session, self.path, method, headers, params, json
+        )
diff --git a/.venv/lib/python3.12/site-packages/postgrest/base_client.py b/.venv/lib/python3.12/site-packages/postgrest/base_client.py
new file mode 100644
index 00000000..e2bf417f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/postgrest/base_client.py
@@ -0,0 +1,80 @@
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+from typing import Dict, Optional, Union
+
+from httpx import BasicAuth, Timeout
+
+from .utils import AsyncClient, SyncClient, is_http_url, is_valid_jwt
+
+
+class BasePostgrestClient(ABC):
+    """Base PostgREST client."""
+
+    def __init__(
+        self,
+        base_url: str,
+        *,
+        schema: str,
+        headers: Dict[str, str],
+        timeout: Union[int, float, Timeout],
+        verify: bool = True,
+        proxy: Optional[str] = None,
+    ) -> None:
+        if not is_http_url(base_url):
+            ValueError("base_url must be a valid HTTP URL string")
+        headers = {
+            **headers,
+            "Accept-Profile": schema,
+            "Content-Profile": schema,
+        }
+        self.session = self.create_session(base_url, headers, timeout, verify, proxy)
+
+    @abstractmethod
+    def create_session(
+        self,
+        base_url: str,
+        headers: Dict[str, str],
+        timeout: Union[int, float, Timeout],
+        verify: bool = True,
+        proxy: Optional[str] = None,
+    ) -> Union[SyncClient, AsyncClient]:
+        raise NotImplementedError()
+
+    def auth(
+        self,
+        token: Optional[str],
+        *,
+        username: Union[str, bytes, None] = None,
+        password: Union[str, bytes] = "",
+    ):
+        """
+        Authenticate the client with either bearer token or basic authentication.
+
+        Raises:
+            `ValueError`: If neither authentication scheme is provided.
+
+        .. note::
+            Bearer token is preferred if both are provided.
+        """
+        if token:
+            if not is_valid_jwt(token):
+                ValueError("token must be a valid JWT authorization token")
+            self.session.headers["Authorization"] = f"Bearer {token}"
+        elif username:
+            self.session.auth = BasicAuth(username, password)
+        else:
+            raise ValueError(
+                "Neither bearer token or basic authentication scheme is provided"
+            )
+        return self
+
+    def schema(self, schema: str):
+        """Switch to another schema."""
+        self.session.headers.update(
+            {
+                "Accept-Profile": schema,
+                "Content-Profile": schema,
+            }
+        )
+        return self
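Both clients inherit auth() and schema() from this base class. A short sketch of how they are typically called; the schema name and credentials below are placeholders:

    from postgrest import SyncPostgrestClient


    def configure(client: SyncPostgrestClient, token: str) -> None:
        # Bearer auth: the token must be a valid JWT and is placed in the
        # Authorization header of the underlying session.
        client.auth(token)

        # Alternatively, basic auth when no token is available:
        # client.auth(None, username="service", password="secret")

        # schema() swaps the Accept-Profile / Content-Profile headers so later
        # requests target another PostgreSQL schema.
        client.schema("analytics")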
diff --git a/.venv/lib/python3.12/site-packages/postgrest/base_request_builder.py b/.venv/lib/python3.12/site-packages/postgrest/base_request_builder.py
new file mode 100644
index 00000000..7b5ab4b7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/postgrest/base_request_builder.py
@@ -0,0 +1,685 @@
+from __future__ import annotations
+
+import json
+from json import JSONDecodeError
+from re import search
+from typing import (
+    Any,
+    Dict,
+    Generic,
+    Iterable,
+    List,
+    Literal,
+    NamedTuple,
+    Optional,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+)
+
+from httpx import Headers, QueryParams
+from httpx import Response as RequestResponse
+from pydantic import BaseModel
+
+try:
+    from typing import Self
+except ImportError:
+    from typing_extensions import Self
+
+try:
+    # >= 2.0.0
+    from pydantic import field_validator
+except ImportError:
+    # < 2.0.0
+    from pydantic import validator as field_validator
+
+from .types import CountMethod, Filters, RequestMethod, ReturnMethod
+from .utils import AsyncClient, SyncClient, get_origin_and_cast, sanitize_param
+
+
+class QueryArgs(NamedTuple):
+    # groups the method, json, headers and params for a query in a single object
+    method: RequestMethod
+    params: QueryParams
+    headers: Headers
+    json: Dict[Any, Any]
+
+
+def _unique_columns(json: List[Dict]):
+    unique_keys = {key for row in json for key in row.keys()}
+    columns = ",".join([f'"{k}"' for k in unique_keys])
+    return columns
+
+
+def _cleaned_columns(columns: Tuple[str, ...]) -> str:
+    quoted = False
+    cleaned = []
+
+    for column in columns:
+        clean_column = ""
+        for char in column:
+            if char.isspace() and not quoted:
+                continue
+            if char == '"':
+                quoted = not quoted
+            clean_column += char
+        cleaned.append(clean_column)
+
+    return ",".join(cleaned)
+
+
+def pre_select(
+    *columns: str,
+    count: Optional[CountMethod] = None,
+    head: Optional[bool] = None,
+) -> QueryArgs:
+    method = RequestMethod.HEAD if head else RequestMethod.GET
+    cleaned_columns = _cleaned_columns(columns or "*")
+    params = QueryParams({"select": cleaned_columns})
+
+    headers = Headers({"Prefer": f"count={count}"}) if count else Headers()
+    return QueryArgs(method, params, headers, {})
+
+
+def pre_insert(
+    json: Union[dict, list],
+    *,
+    count: Optional[CountMethod],
+    returning: ReturnMethod,
+    upsert: bool,
+    default_to_null: bool = True,
+) -> QueryArgs:
+    prefer_headers = [f"return={returning}"]
+    if count:
+        prefer_headers.append(f"count={count}")
+    if upsert:
+        prefer_headers.append("resolution=merge-duplicates")
+    if not default_to_null:
+        prefer_headers.append("missing=default")
+    headers = Headers({"Prefer": ",".join(prefer_headers)})
+    # Adding 'columns' query parameters
+    query_params = {}
+    if isinstance(json, list):
+        query_params = {"columns": _unique_columns(json)}
+    return QueryArgs(RequestMethod.POST, QueryParams(query_params), headers, json)
+
+
+def pre_upsert(
+    json: Union[dict, list],
+    *,
+    count: Optional[CountMethod],
+    returning: ReturnMethod,
+    ignore_duplicates: bool,
+    on_conflict: str = "",
+    default_to_null: bool = True,
+) -> QueryArgs:
+    query_params = {}
+    prefer_headers = [f"return={returning}"]
+    if count:
+        prefer_headers.append(f"count={count}")
+    resolution = "ignore" if ignore_duplicates else "merge"
+    prefer_headers.append(f"resolution={resolution}-duplicates")
+    if not default_to_null:
+        prefer_headers.append("missing=default")
+    headers = Headers({"Prefer": ",".join(prefer_headers)})
+    if on_conflict:
+        query_params["on_conflict"] = on_conflict
+    # Adding 'columns' query parameters
+    if isinstance(json, list):
+        query_params["columns"] = _unique_columns(json)
+    return QueryArgs(RequestMethod.POST, QueryParams(query_params), headers, json)
+
+
+def pre_update(
+    json: dict,
+    *,
+    count: Optional[CountMethod],
+    returning: ReturnMethod,
+) -> QueryArgs:
+    prefer_headers = [f"return={returning}"]
+    if count:
+        prefer_headers.append(f"count={count}")
+    headers = Headers({"Prefer": ",".join(prefer_headers)})
+    return QueryArgs(RequestMethod.PATCH, QueryParams(), headers, json)
+
+
+def pre_delete(
+    *,
+    count: Optional[CountMethod],
+    returning: ReturnMethod,
+) -> QueryArgs:
+    prefer_headers = [f"return={returning}"]
+    if count:
+        prefer_headers.append(f"count={count}")
+    headers = Headers({"Prefer": ",".join(prefer_headers)})
+    return QueryArgs(RequestMethod.DELETE, QueryParams(), headers, {})
+
+
+_ReturnT = TypeVar("_ReturnT")
+
+
+# the APIResponse.data is marked as _ReturnT instead of list[_ReturnT]
+# as it is also returned in the case of rpc() calls; and rpc calls do not
+# necessarily return lists.
+# https://github.com/supabase-community/postgrest-py/issues/200
+class APIResponse(BaseModel, Generic[_ReturnT]):
+    data: List[_ReturnT]
+    """The data returned by the query."""
+    count: Optional[int] = None
+    """The number of rows returned."""
+
+    @field_validator("data")
+    @classmethod
+    def raise_when_api_error(cls: Type[Self], value: Any) -> Any:
+        if isinstance(value, dict) and value.get("message"):
+            raise ValueError("You are passing an API error to the data field.")
+        return value
+
+    @staticmethod
+    def _get_count_from_content_range_header(
+        content_range_header: str,
+    ) -> Optional[int]:
+        content_range = content_range_header.split("/")
+        return None if len(content_range) < 2 else int(content_range[1])
+
+    @staticmethod
+    def _is_count_in_prefer_header(prefer_header: str) -> bool:
+        pattern = f"count=({'|'.join([cm.value for cm in CountMethod])})"
+        return bool(search(pattern, prefer_header))
+
+    @classmethod
+    def _get_count_from_http_request_response(
+        cls: Type[Self],
+        request_response: RequestResponse,
+    ) -> Optional[int]:
+        prefer_header: Optional[str] = request_response.request.headers.get("prefer")
+        if not prefer_header:
+            return None
+        is_count_in_prefer_header = cls._is_count_in_prefer_header(prefer_header)
+        content_range_header: Optional[str] = request_response.headers.get(
+            "content-range"
+        )
+        return (
+            cls._get_count_from_content_range_header(content_range_header)
+            if (is_count_in_prefer_header and content_range_header)
+            else None
+        )
+
+    @classmethod
+    def from_http_request_response(
+        cls: Type[Self], request_response: RequestResponse
+    ) -> Self:
+        count = cls._get_count_from_http_request_response(request_response)
+        try:
+            data = request_response.json()
+        except JSONDecodeError:
+            data = request_response.text if len(request_response.text) > 0 else []
+        # the type-ignore here is as pydantic needs us to pass the type parameter
+        # here explicitly, but pylance already knows that cls is correctly parametrized
+        return cls[_ReturnT](data=data, count=count)  # type: ignore
+
+    @classmethod
+    def from_dict(cls: Type[Self], dict: Dict[str, Any]) -> Self:
+        keys = dict.keys()
+        assert len(keys) == 3 and "data" in keys and "count" in keys and "error" in keys
+        return cls[_ReturnT](  # type: ignore
+            data=dict.get("data"), count=dict.get("count"), error=dict.get("error")
+        )
+
+
+class SingleAPIResponse(APIResponse[_ReturnT], Generic[_ReturnT]):
+    data: _ReturnT  # type: ignore
+    """The data returned by the query."""
+
+    @classmethod
+    def from_http_request_response(
+        cls: Type[Self], request_response: RequestResponse
+    ) -> Self:
+        count = cls._get_count_from_http_request_response(request_response)
+        try:
+            data = request_response.json()
+        except JSONDecodeError:
+            data = request_response.text if len(request_response.text) > 0 else []
+        return cls[_ReturnT](data=data, count=count)  # type: ignore
+
+    @classmethod
+    def from_dict(cls: Type[Self], dict: Dict[str, Any]) -> Self:
+        keys = dict.keys()
+        assert len(keys) == 3 and "data" in keys and "count" in keys and "error" in keys
+        return cls[_ReturnT](  # type: ignore
+            data=dict.get("data"), count=dict.get("count"), error=dict.get("error")
+        )
+
+
+class BaseFilterRequestBuilder(Generic[_ReturnT]):
+    def __init__(
+        self,
+        session: Union[AsyncClient, SyncClient],
+        headers: Headers,
+        params: QueryParams,
+    ) -> None:
+        self.session = session
+        self.headers = headers
+        self.params = params
+        self.negate_next = False
+
+    @property
+    def not_(self: Self) -> Self:
+        """Whether the filter applied next should be negated."""
+        self.negate_next = True
+        return self
+
+    def filter(self: Self, column: str, operator: str, criteria: str) -> Self:
+        """Apply filters on a query.
+
+        Args:
+            column: The name of the column to apply a filter on
+            operator: The operator to use while filtering
+            criteria: The value to filter by
+        """
+        if self.negate_next is True:
+            self.negate_next = False
+            operator = f"{Filters.NOT}.{operator}"
+        key, val = sanitize_param(column), f"{operator}.{criteria}"
+        self.params = self.params.add(key, val)
+        return self
+
+    def eq(self: Self, column: str, value: Any) -> Self:
+        """An 'equal to' filter.
+
+        Args:
+            column: The name of the column to apply a filter on
+            value: The value to filter by
+        """
+        return self.filter(column, Filters.EQ, value)
+
+    def neq(self: Self, column: str, value: Any) -> Self:
+        """A 'not equal to' filter
+
+        Args:
+            column: The name of the column to apply a filter on
+            value: The value to filter by
+        """
+        return self.filter(column, Filters.NEQ, value)
+
+    def gt(self: Self, column: str, value: Any) -> Self:
+        """A 'greater than' filter
+
+        Args:
+            column: The name of the column to apply a filter on
+            value: The value to filter by
+        """
+        return self.filter(column, Filters.GT, value)
+
+    def gte(self: Self, column: str, value: Any) -> Self:
+        """A 'greater than or equal to' filter
+
+        Args:
+            column: The name of the column to apply a filter on
+            value: The value to filter by
+        """
+        return self.filter(column, Filters.GTE, value)
+
+    def lt(self: Self, column: str, value: Any) -> Self:
+        """A 'less than' filter
+
+        Args:
+            column: The name of the column to apply a filter on
+            value: The value to filter by
+        """
+        return self.filter(column, Filters.LT, value)
+
+    def lte(self: Self, column: str, value: Any) -> Self:
+        """A 'less than or equal to' filter
+
+        Args:
+            column: The name of the column to apply a filter on
+            value: The value to filter by
+        """
+        return self.filter(column, Filters.LTE, value)
+
+    def is_(self: Self, column: str, value: Any) -> Self:
+        """An 'is' filter
+
+        Args:
+            column: The name of the column to apply a filter on
+            value: The value to filter by
+        """
+        if value is None:
+            value = "null"
+        return self.filter(column, Filters.IS, value)
+
+    def like(self: Self, column: str, pattern: str) -> Self:
+        """A 'LIKE' filter, to use for pattern matching.
+
+        Args:
+            column: The name of the column to apply a filter on
+            pattern: The pattern to filter by
+        """
+        return self.filter(column, Filters.LIKE, pattern)
+
+    def like_all_of(self: Self, column: str, pattern: str) -> Self:
+        """A 'LIKE' filter, to use for pattern matching.
+
+        Args:
+            column: The name of the column to apply a filter on
+            pattern: The pattern to filter by
+        """
+
+        return self.filter(column, Filters.LIKE_ALL, f"{{{pattern}}}")
+
+    def like_any_of(self: Self, column: str, pattern: str) -> Self:
+        """A 'LIKE' filter, to use for pattern matching.
+
+        Args:
+            column: The name of the column to apply a filter on
+            pattern: The pattern to filter by
+        """
+
+        return self.filter(column, Filters.LIKE_ANY, f"{{{pattern}}}")
+
+    def ilike_all_of(self: Self, column: str, pattern: str) -> Self:
+        """A 'ILIKE' filter, to use for pattern matching (case insensitive).
+
+        Args:
+            column: The name of the column to apply a filter on
+            pattern: The pattern to filter by
+        """
+
+        return self.filter(column, Filters.ILIKE_ALL, f"{{{pattern}}}")
+
+    def ilike_any_of(self: Self, column: str, pattern: str) -> Self:
+        """A 'ILIKE' filter, to use for pattern matching (case insensitive).
+
+        Args:
+            column: The name of the column to apply a filter on
+            pattern: The pattern to filter by
+        """
+
+        return self.filter(column, Filters.ILIKE_ANY, f"{{{pattern}}}")
+
+    def ilike(self: Self, column: str, pattern: str) -> Self:
+        """An 'ILIKE' filter, to use for pattern matching (case insensitive).
+
+        Args:
+            column: The name of the column to apply a filter on
+            pattern: The pattern to filter by
+        """
+        return self.filter(column, Filters.ILIKE, pattern)
+
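
A hedged sketch of the pattern-matching family above (illustrative column names, builder constructed directly rather than via a client): `like`/`ilike` pass the pattern through as-is, while the `*_all_of`/`*_any_of` variants wrap a comma-separated pattern list in braces.

    from httpx import Headers, QueryParams

    from postgrest.base_request_builder import BaseFilterRequestBuilder
    from postgrest.utils import SyncClient

    b = BaseFilterRequestBuilder(SyncClient(), Headers(), QueryParams())
    b.ilike("name", "jo*").like_any_of("city", "Lis*,Mad*")
    print(b.params.get("name"))  # ilike.jo*
    print(b.params.get("city"))  # like(any).{Lis*,Mad*}
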
+    def or_(self: Self, filters: str, reference_table: Optional[str] = None) -> Self:
+        """An 'or' filter
+
+        Args:
+            filters: The filters to use, following PostgREST syntax
+            reference_table: Set this to filter on referenced tables instead of the parent table
+        """
+        key = f"{sanitize_param(reference_table)}.or" if reference_table else "or"
+        self.params = self.params.add(key, f"({filters})")
+        return self
+
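
For example (a sketch with invented column names), `or_` takes a raw PostgREST filter string and wraps it in parentheses; with `reference_table` the parameter key becomes `<table>.or`.

    from httpx import Headers, QueryParams

    from postgrest.base_request_builder import BaseFilterRequestBuilder
    from postgrest.utils import SyncClient

    b = BaseFilterRequestBuilder(SyncClient(), Headers(), QueryParams())
    b.or_("age.gte.18,status.eq.active")
    b.or_("population.gt.1000", reference_table="cities")
    print(b.params.get("or"))         # (age.gte.18,status.eq.active)
    print(b.params.get("cities.or"))  # (population.gt.1000)
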
+    def fts(self: Self, column: str, query: Any) -> Self:
+        """A full-text search filter using `to_tsquery`."""
+        return self.filter(column, Filters.FTS, query)
+
+    def plfts(self: Self, column: str, query: Any) -> Self:
+        """A full-text search filter using `plainto_tsquery`."""
+        return self.filter(column, Filters.PLFTS, query)
+
+    def phfts(self: Self, column: str, query: Any) -> Self:
+        """A full-text search filter using `phraseto_tsquery`."""
+        return self.filter(column, Filters.PHFTS, query)
+
+    def wfts(self: Self, column: str, query: Any) -> Self:
+        """A full-text search filter using `websearch_to_tsquery`."""
+        return self.filter(column, Filters.WFTS, query)
+
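
A small sketch of the full-text-search helpers (column names invented); the query value is passed through verbatim after the operator prefix.

    from httpx import Headers, QueryParams

    from postgrest.base_request_builder import BaseFilterRequestBuilder
    from postgrest.utils import SyncClient

    b = BaseFilterRequestBuilder(SyncClient(), Headers(), QueryParams())
    b.fts("description", "cat&dog").wfts("body", "fat or rat")
    print(b.params.get("description"))  # fts.cat&dog
    print(b.params.get("body"))         # wfts.fat or rat
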
+    def in_(self: Self, column: str, values: Iterable[Any]) -> Self:
+        """An 'in' filter: the column value must equal one of `values`."""
+        values = map(sanitize_param, values)
+        values = ",".join(values)
+        return self.filter(column, Filters.IN, f"({values})")
+
+    def cs(self: Self, column: str, values: Iterable[Any]) -> Self:
+        """A 'contains' filter for array and range columns."""
+        values = ",".join(values)
+        return self.filter(column, Filters.CS, f"{{{values}}}")
+
+    def cd(self: Self, column: str, values: Iterable[Any]) -> Self:
+        """A 'contained by' filter for array and range columns."""
+        values = ",".join(values)
+        return self.filter(column, Filters.CD, f"{{{values}}}")
+
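
Illustrative sketch (made-up columns): `in_` sanitizes each value and builds a parenthesized list, while `cs`/`cd` build a braced array literal.

    from httpx import Headers, QueryParams

    from postgrest.base_request_builder import BaseFilterRequestBuilder
    from postgrest.utils import SyncClient

    b = BaseFilterRequestBuilder(SyncClient(), Headers(), QueryParams())
    b.in_("id", [1, 2, 3]).cs("tags", ["a", "b"])
    print(b.params.get("id"))    # in.(1,2,3)
    print(b.params.get("tags"))  # cs.{a,b}
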
+    def contains(
+        self: Self, column: str, value: Union[Iterable[Any], str, Dict[Any, Any]]
+    ) -> Self:
+        if isinstance(value, str):
+            # range types can be inclusive '[', ']' or exclusive '(', ')' so just
+            # keep it simple and accept a string
+            return self.filter(column, Filters.CS, value)
+        if not isinstance(value, dict) and isinstance(value, Iterable):
+            # Expected to be some type of iterable
+            stringified_values = ",".join(value)
+            return self.filter(column, Filters.CS, f"{{{stringified_values}}}")
+
+        return self.filter(column, Filters.CS, json.dumps(value))
+
+    def contained_by(
+        self: Self, column: str, value: Union[Iterable[Any], str, Dict[Any, Any]]
+    ) -> Self:
+        if isinstance(value, str):
+            # range
+            return self.filter(column, Filters.CD, value)
+        if not isinstance(value, dict) and isinstance(value, Iterable):
+            stringified_values = ",".join(value)
+            return self.filter(column, Filters.CD, f"{{{stringified_values}}}")
+        return self.filter(column, Filters.CD, json.dumps(value))
+
+    def ov(self: Self, column: str, value: Iterable[Any]) -> Self:
+        if isinstance(value, str):
+            # range types can be inclusive '[', ']' or exclusive '(', ')' so just
+            # keep it simple and accept a string
+            return self.filter(column, Filters.OV, value)
+        if not isinstance(value, dict) and isinstance(value, Iterable):
+            # Expected to be some type of iterable
+            stringified_values = ",".join(value)
+            return self.filter(column, Filters.OV, f"{{{stringified_values}}}")
+        return self.filter(column, Filters.OV, json.dumps(value))
+
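
The three accepted value shapes for `contains`/`contained_by`/`ov`, sketched with invented columns: a string is treated as a range literal and passed through, a non-dict iterable becomes a braced list, and a dict is serialized to JSON.

    from httpx import Headers, QueryParams

    from postgrest.base_request_builder import BaseFilterRequestBuilder
    from postgrest.utils import SyncClient

    b = BaseFilterRequestBuilder(SyncClient(), Headers(), QueryParams())
    b.contains("during", "[2000-01-01, 2000-01-02)")  # range string, passed through
    b.contains("tags", ["a", "b"])                    # iterable -> braced list
    b.contains("metadata", {"plan": "pro"})           # dict -> JSON
    print(b.params.get("during"))    # cs.[2000-01-01, 2000-01-02)
    print(b.params.get("tags"))      # cs.{a,b}
    print(b.params.get("metadata"))  # cs.{"plan": "pro"}
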
+    def sl(self: Self, column: str, range: Tuple[int, int]) -> Self:
+        """A 'strictly left of' range filter."""
+        return self.filter(column, Filters.SL, f"({range[0]},{range[1]})")
+
+    def sr(self: Self, column: str, range: Tuple[int, int]) -> Self:
+        """A 'strictly right of' range filter."""
+        return self.filter(column, Filters.SR, f"({range[0]},{range[1]})")
+
+    def nxl(self: Self, column: str, range: Tuple[int, int]) -> Self:
+        """A 'does not extend to the left of' range filter."""
+        return self.filter(column, Filters.NXL, f"({range[0]},{range[1]})")
+
+    def nxr(self: Self, column: str, range: Tuple[int, int]) -> Self:
+        """A 'does not extend to the right of' range filter."""
+        return self.filter(column, Filters.NXR, f"({range[0]},{range[1]})")
+
+    def adj(self: Self, column: str, range: Tuple[int, int]) -> Self:
+        """An 'is adjacent to' range filter."""
+        return self.filter(column, Filters.ADJ, f"({range[0]},{range[1]})")
+
+    def range_gt(self: Self, column: str, range: Tuple[int, int]) -> Self:
+        return self.sr(column, range)
+
+    def range_gte(self: Self, column: str, range: Tuple[int, int]) -> Self:
+        return self.nxl(column, range)
+
+    def range_lt(self: Self, column: str, range: Tuple[int, int]) -> Self:
+        return self.sl(column, range)
+
+    def range_lte(self: Self, column: str, range: Tuple[int, int]) -> Self:
+        return self.nxr(column, range)
+
+    def range_adjacent(self: Self, column: str, range: Tuple[int, int]) -> Self:
+        return self.adj(column, range)
+
+    def overlaps(self: Self, column: str, values: Iterable[Any]) -> Self:
+        return self.ov(column, values)
+
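
The readable aliases map onto the short PostgREST range operators; a sketch with invented columns:

    from httpx import Headers, QueryParams

    from postgrest.base_request_builder import BaseFilterRequestBuilder
    from postgrest.utils import SyncClient

    b = BaseFilterRequestBuilder(SyncClient(), Headers(), QueryParams())
    b.range_gt("during", (10, 20)).range_adjacent("slot", (1, 5))
    print(b.params.get("during"))  # sr.(10,20)
    print(b.params.get("slot"))    # adj.(1,5)
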
+    def match(self: Self, query: Dict[str, Any]) -> Self:
+        """Apply an 'equal to' filter for every key/value pair in `query`.
+
+        Args:
+            query: A dictionary mapping column names to the values they must equal
+        """
+        if not query:
+            raise ValueError(
+                "query dictionary should contain at least one key-value pair"
+            )
+
+        updated_query = self
+        for key, value in query.items():
+            updated_query = updated_query.eq(key, value)
+
+        return updated_query
+
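
`match` is simply a loop of `eq` filters, one per key/value pair; a sketch:

    from httpx import Headers, QueryParams

    from postgrest.base_request_builder import BaseFilterRequestBuilder
    from postgrest.utils import SyncClient

    b = BaseFilterRequestBuilder(SyncClient(), Headers(), QueryParams())
    b.match({"status": "active", "role": "admin"})
    print(str(b.params))  # status=eq.active&role=eq.admin
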
+
+class BaseSelectRequestBuilder(BaseFilterRequestBuilder[_ReturnT]):
+    def __init__(
+        self,
+        session: Union[AsyncClient, SyncClient],
+        headers: Headers,
+        params: QueryParams,
+    ) -> None:
+        # Generic[T] is an instance of typing._GenericAlias, so doing Generic[T].__init__
+        # tries to call _GenericAlias.__init__ - which is the wrong method
+        # The __origin__ attribute of the _GenericAlias is the actual class
+        get_origin_and_cast(BaseFilterRequestBuilder[_ReturnT]).__init__(
+            self, session, headers, params
+        )
+
+    def explain(
+        self: Self,
+        analyze: bool = False,
+        verbose: bool = False,
+        settings: bool = False,
+        buffers: bool = False,
+        wal: bool = False,
+        format: Literal["text", "json"] = "text",
+    ) -> Self:
+        options = [
+            key
+            for key, value in locals().items()
+            if key not in ["self", "format"] and value
+        ]
+        options_str = "|".join(options)
+        self.headers["Accept"] = (
+            f"application/vnd.pgrst.plan+{format}; options={options_str}"
+        )
+        return self
+
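
`explain` only rewrites the Accept header; the enabled boolean options are joined with `|`. A sketch using the select builder directly (normally it is obtained from a client):

    from httpx import Headers, QueryParams

    from postgrest.base_request_builder import BaseSelectRequestBuilder
    from postgrest.utils import SyncClient

    b = BaseSelectRequestBuilder(SyncClient(), Headers(), QueryParams())
    b.explain(analyze=True, verbose=True, format="json")
    print(b.headers["Accept"])
    # application/vnd.pgrst.plan+json; options=analyze|verbose
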
+    def order(
+        self: Self,
+        column: str,
+        *,
+        desc: bool = False,
+        nullsfirst: bool = False,
+        foreign_table: Optional[str] = None,
+    ) -> Self:
+        """Sort the returned rows in some specific order.
+
+        Args:
+            column: The column to order by
+            desc: Whether the rows should be ordered in descending order or not.
+            nullsfirst: Whether rows with null values in this column should be returned first.
+            foreign_table: Foreign table name whose results are to be ordered.
+        .. versionchanged:: 0.10.3
+           Allow ordering results for foreign tables with the foreign_table parameter.
+        """
+
+        new_order_parameter = (
+            f"{foreign_table + '(' if foreign_table else ''}{column}{')' if foreign_table else ''}"
+            f"{'.desc' if desc else ''}{'.nullsfirst' if nullsfirst else ''}"
+        )
+
+        existing_order_parameter = self.params.get("order")
+        if existing_order_parameter:
+            self.params = self.params.remove("order")
+            new_order_parameter = f"{existing_order_parameter},{new_order_parameter}"
+
+        self.params = self.params.add(
+            "order",
+            new_order_parameter,
+        )
+        return self
+
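
Repeated calls to `order` fold into a single comma-separated `order` parameter; a sketch with invented columns:

    from httpx import Headers, QueryParams

    from postgrest.base_request_builder import BaseSelectRequestBuilder
    from postgrest.utils import SyncClient

    b = BaseSelectRequestBuilder(SyncClient(), Headers(), QueryParams())
    b.order("created_at", desc=True).order("id")
    print(b.params.get("order"))  # created_at.desc,id
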
+    def limit(self: Self, size: int, *, foreign_table: Optional[str] = None) -> Self:
+        """Limit the number of rows returned by a query.
+
+        Args:
+            size: The number of rows to be returned
+            foreign_table: Foreign table name to limit
+        .. versionchanged:: 0.10.3
+           Allow limiting results returned for foreign tables with the foreign_table parameter.
+        """
+        self.params = self.params.add(
+            f"{foreign_table}.limit" if foreign_table else "limit",
+            size,
+        )
+        return self
+
+    def offset(self: Self, size: int) -> Self:
+        """Set the starting row index returned by a query.
+
+        Args:
+            size: The number of rows to skip before starting to return rows
+        """
+        self.params = self.params.add(
+            "offset",
+            size,
+        )
+        return self
+
+    def range(
+        self: Self, start: int, end: int, foreign_table: Optional[str] = None
+    ) -> Self:
+        """Return rows in the zero-based, inclusive index range [start, end].
+
+        Args:
+            start: Index of the first row to return
+            end: Index of the last row to return
+            foreign_table: Foreign table name to apply the range to
+        """
+        self.params = self.params.add(
+            f"{foreign_table}.offset" if foreign_table else "offset", start
+        )
+        self.params = self.params.add(
+            f"{foreign_table}.limit" if foreign_table else "limit",
+            end - start + 1,
+        )
+        return self
+
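
`range(start, end)` is shorthand for an `offset` of `start` plus a `limit` of `end - start + 1`; a sketch:

    from httpx import Headers, QueryParams

    from postgrest.base_request_builder import BaseSelectRequestBuilder
    from postgrest.utils import SyncClient

    b = BaseSelectRequestBuilder(SyncClient(), Headers(), QueryParams())
    b.range(20, 29)       # rows 21..30 in 1-based terms
    print(str(b.params))  # offset=20&limit=10
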
+
+class BaseRPCRequestBuilder(BaseSelectRequestBuilder[_ReturnT]):
+    def __init__(
+        self,
+        session: Union[AsyncClient, SyncClient],
+        headers: Headers,
+        params: QueryParams,
+    ) -> None:
+        # Generic[T] is an instance of typing._GenericAlias, so doing Generic[T].__init__
+        # tries to call _GenericAlias.__init__ - which is the wrong method
+        # The __origin__ attribute of the _GenericAlias is the actual class
+        get_origin_and_cast(BaseSelectRequestBuilder[_ReturnT]).__init__(
+            self, session, headers, params
+        )
+
+    def select(
+        self,
+        *columns: str,
+    ) -> Self:
+        """Run a SELECT query.
+
+        Args:
+            *columns: The names of the columns to fetch.
+        Returns:
+            The same builder instance, with the requested columns added to the query
+        """
+        method, params, headers, json = pre_select(*columns, count=None)
+        self.params = self.params.add("select", params.get("select"))
+        self.headers["Prefer"] = "return=representation"
+        return self
+
+    def single(self) -> Self:
+        """Specify that the query will only return a single row in response.
+
+        .. caution::
+            The API will raise an error if the query returns anything other than exactly one row.
+        """
+        self.headers["Accept"] = "application/vnd.pgrst.object+json"
+        return self
+
+    def maybe_single(self) -> Self:
+        """Retrieves at most one row from the result. Result must be at most one row (e.g. using `eq` on a UNIQUE column), otherwise this will result in an error."""
+        self.headers["Accept"] = "application/vnd.pgrst.object+json"
+        return self
+
+    def csv(self) -> Self:
+        """Specify that the query must retrieve data as a single CSV string."""
+        self.headers["Accept"] = "text/csv"
+        return self
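
The RPC modifiers above mostly adjust headers: `single`/`maybe_single` request the `vnd.pgrst.object+json` representation and `csv` requests `text/csv`, while `select` relies on the module's `pre_select` helper (defined earlier in this file) to build the column list. A header-only sketch, again constructing the builder directly for illustration:

    from httpx import Headers, QueryParams

    from postgrest.base_request_builder import BaseRPCRequestBuilder
    from postgrest.utils import SyncClient

    b = BaseRPCRequestBuilder(SyncClient(), Headers(), QueryParams())
    b.single()
    print(b.headers["Accept"])  # application/vnd.pgrst.object+json
    b.csv()
    print(b.headers["Accept"])  # text/csv
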
diff --git a/.venv/lib/python3.12/site-packages/postgrest/constants.py b/.venv/lib/python3.12/site-packages/postgrest/constants.py
new file mode 100644
index 00000000..4c3c17c8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/postgrest/constants.py
@@ -0,0 +1,6 @@
+DEFAULT_POSTGREST_CLIENT_HEADERS = {
+    "Accept": "application/json",
+    "Content-Type": "application/json",
+}
+
+DEFAULT_POSTGREST_CLIENT_TIMEOUT = 120
diff --git a/.venv/lib/python3.12/site-packages/postgrest/deprecated_client.py b/.venv/lib/python3.12/site-packages/postgrest/deprecated_client.py
new file mode 100644
index 00000000..1d7d9722
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/postgrest/deprecated_client.py
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+from deprecation import deprecated
+
+from ._async.client import AsyncPostgrestClient
+from .version import __version__
+
+
+class Client(AsyncPostgrestClient):
+    """Alias to PostgrestClient."""
+
+    @deprecated("0.2.0", "1.0.0", __version__, "Use PostgrestClient instead")
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+
+
+PostgrestClient = Client
diff --git a/.venv/lib/python3.12/site-packages/postgrest/deprecated_get_request_builder.py b/.venv/lib/python3.12/site-packages/postgrest/deprecated_get_request_builder.py
new file mode 100644
index 00000000..767cacfc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/postgrest/deprecated_get_request_builder.py
@@ -0,0 +1,14 @@
+from __future__ import annotations
+
+from deprecation import deprecated
+
+from ._async.request_builder import AsyncSelectRequestBuilder
+from .version import __version__
+
+
+class GetRequestBuilder(AsyncSelectRequestBuilder):
+    """Alias to SelectRequestBuilder."""
+
+    @deprecated("0.4.0", "1.0.0", __version__, "Use SelectRequestBuilder instead")
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
diff --git a/.venv/lib/python3.12/site-packages/postgrest/exceptions.py b/.venv/lib/python3.12/site-packages/postgrest/exceptions.py
new file mode 100644
index 00000000..303c5705
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/postgrest/exceptions.py
@@ -0,0 +1,50 @@
+from typing import Dict, Optional
+
+
+class APIError(Exception):
+    """
+    Base exception for all API errors.
+    """
+
+    _raw_error: Dict[str, str]
+    message: Optional[str]
+    """The error message."""
+    code: Optional[str]
+    """The error code."""
+    hint: Optional[str]
+    """The error hint."""
+    details: Optional[str]
+    """The error details."""
+
+    def __init__(self, error: Dict[str, str]) -> None:
+        self._raw_error = error
+        self.message = error.get("message")
+        self.code = error.get("code")
+        self.hint = error.get("hint")
+        self.details = error.get("details")
+        Exception.__init__(self, str(self))
+
+    def __repr__(self) -> str:
+        error_text = f"Error {self.code}:" if self.code else ""
+        message_text = f"\nMessage: {self.message}" if self.message else ""
+        hint_text = f"\nHint: {self.hint}" if self.hint else ""
+        details_text = f"\nDetails: {self.details}" if self.details else ""
+        complete_error_text = f"{error_text}{message_text}{hint_text}{details_text}"
+        return complete_error_text or "Empty error"
+
+    def json(self) -> Dict[str, str]:
+        """Convert the error into a dictionary.
+
+        Returns:
+            :class:`dict`
+        """
+        return self._raw_error
+
+
+def generate_default_error_message(r):
+    """Build a fallback error dictionary for responses whose body is not valid JSON."""
+    return {
+        "message": "JSON could not be generated",
+        "code": r.status_code,
+        "hint": "Refer to full message for details",
+        "details": str(r.content),
+    }
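
A small sketch of how the error surfaces to callers (the error payload below is invented for illustration):

    from postgrest.exceptions import APIError

    try:
        raise APIError({"message": "duplicate key value", "code": "23505",
                        "details": "Key (id)=(1) already exists."})
    except APIError as exc:
        print(exc.code)     # 23505
        print(exc.message)  # duplicate key value
        print(exc.json())   # the raw error dictionary
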
diff --git a/.venv/lib/python3.12/site-packages/postgrest/py.typed b/.venv/lib/python3.12/site-packages/postgrest/py.typed
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/postgrest/py.typed
diff --git a/.venv/lib/python3.12/site-packages/postgrest/types.py b/.venv/lib/python3.12/site-packages/postgrest/types.py
new file mode 100644
index 00000000..fa6f94ce
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/postgrest/types.py
@@ -0,0 +1,58 @@
+from __future__ import annotations
+
+import sys
+
+if sys.version_info >= (3, 11):
+    from enum import StrEnum
+else:
+    from strenum import StrEnum
+
+
+class CountMethod(StrEnum):
+    exact = "exact"
+    planned = "planned"
+    estimated = "estimated"
+
+
+class Filters(StrEnum):
+    NOT = "not"
+    EQ = "eq"
+    NEQ = "neq"
+    GT = "gt"
+    GTE = "gte"
+    LT = "lt"
+    LTE = "lte"
+    IS = "is"
+    LIKE = "like"
+    LIKE_ALL = "like(all)"
+    LIKE_ANY = "like(any)"
+    ILIKE = "ilike"
+    ILIKE_ALL = "ilike(all)"
+    ILIKE_ANY = "ilike(any)"
+    FTS = "fts"
+    PLFTS = "plfts"
+    PHFTS = "phfts"
+    WFTS = "wfts"
+    IN = "in"
+    CS = "cs"
+    CD = "cd"
+    OV = "ov"
+    SL = "sl"
+    SR = "sr"
+    NXL = "nxl"
+    NXR = "nxr"
+    ADJ = "adj"
+
+
+class RequestMethod(StrEnum):
+    GET = "GET"
+    POST = "POST"
+    PATCH = "PATCH"
+    PUT = "PUT"
+    DELETE = "DELETE"
+    HEAD = "HEAD"
+
+
+class ReturnMethod(StrEnum):
+    minimal = "minimal"
+    representation = "representation"
diff --git a/.venv/lib/python3.12/site-packages/postgrest/utils.py b/.venv/lib/python3.12/site-packages/postgrest/utils.py
new file mode 100644
index 00000000..d2f3c48e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/postgrest/utils.py
@@ -0,0 +1,68 @@
+from __future__ import annotations
+
+import re
+from typing import Any, Type, TypeVar, cast, get_origin
+from urllib.parse import urlparse
+
+from httpx import AsyncClient  # noqa: F401
+from httpx import Client as BaseClient  # noqa: F401
+
+BASE64URL_REGEX = r"^([a-z0-9_-]{4})*($|[a-z0-9_-]{3}$|[a-z0-9_-]{2}$)$"
+
+
+class SyncClient(BaseClient):
+    def aclose(self) -> None:
+        self.close()
+
+
+def sanitize_param(param: Any) -> str:
+    param_str = str(param)
+    reserved_chars = ",:()"
+    if any(char in param_str for char in reserved_chars):
+        return f'"{param_str}"'
+    return param_str
+
+
+def sanitize_pattern_param(pattern: str) -> str:
+    return sanitize_param(pattern.replace("%", "*"))
+
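
Quick sketch of the sanitizers: values containing PostgREST reserved characters are wrapped in double quotes, and `%` wildcards are rewritten to `*`.

    from postgrest.utils import sanitize_param, sanitize_pattern_param

    print(sanitize_param("plain_name"))    # plain_name
    print(sanitize_param("weird,name"))    # "weird,name"
    print(sanitize_pattern_param("jo%n"))  # jo*n
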
+
+_T = TypeVar("_T")
+
+
+def get_origin_and_cast(typ: type[type[_T]]) -> type[_T]:
+    # Base[T] is an instance of typing._GenericAlias, so doing Base[T].__init__
+    # tries to call _GenericAlias.__init__ - which is the wrong method
+    # get_origin(Base[T]) returns Base
+    # This function casts Base back to Base[T] to maintain type-safety
+    # while still allowing us to access the methods of `Base` at runtime
+    # See: definitions of request builders that use multiple-inheritance
+    # like AsyncFilterRequestBuilder
+    return cast(Type[_T], get_origin(typ))
+
+
+def is_http_url(url: str) -> bool:
+    return urlparse(url).scheme in {"https", "http"}
+
+
+def is_valid_jwt(value: str) -> bool:
+    """Checks if value looks like a JWT, does not do any extra parsing."""
+    if not isinstance(value, str):
+        return False
+
+    # Remove surrounding whitespace, if any.
+    value = value.strip()
+
+    # Remove "Bearer " prefix if any.
+    if value.startswith("Bearer "):
+        value = value[7:]
+
+    # A valid JWT must have exactly 2 dots (Header.Payload.Signature)
+    if value.count(".") != 2:
+        return False
+
+    for part in value.split("."):
+        if not re.search(BASE64URL_REGEX, part, re.IGNORECASE):
+            return False
+
+    return True
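
A sketch of the shape check (the token below is syntactically valid but meaningless): an optional `Bearer ` prefix is stripped, and each of the three dot-separated segments must look like base64url.

    from postgrest.utils import is_valid_jwt

    print(is_valid_jwt("Bearer eyJhbGciOiJIUzI1NiJ9.e30.c2ln"))  # True
    print(is_valid_jwt("not-a-token"))                           # False
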
diff --git a/.venv/lib/python3.12/site-packages/postgrest/version.py b/.venv/lib/python3.12/site-packages/postgrest/version.py
new file mode 100644
index 00000000..d8496fc5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/postgrest/version.py
@@ -0,0 +1 @@
+__version__ = "0.19.3"  # {x-release-please-version}