path: root/.venv/lib/python3.12/site-packages/openai/_utils
author    S. Solomon Darnell  2025-03-28 21:52:21 -0500
committer S. Solomon Darnell  2025-03-28 21:52:21 -0500
commit    4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
tree      ee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/openai/_utils
parent    cc961e04ba734dd72309fb548a2f97d67d578813 (diff)
download  gn-ai-master.tar.gz
two version of R2R are here (HEAD, master)
Diffstat (limited to '.venv/lib/python3.12/site-packages/openai/_utils')
-rw-r--r--  .venv/lib/python3.12/site-packages/openai/_utils/__init__.py      60
-rw-r--r--  .venv/lib/python3.12/site-packages/openai/_utils/_logs.py         42
-rw-r--r--  .venv/lib/python3.12/site-packages/openai/_utils/_proxy.py        62
-rw-r--r--  .venv/lib/python3.12/site-packages/openai/_utils/_reflection.py   45
-rw-r--r--  .venv/lib/python3.12/site-packages/openai/_utils/_streams.py      12
-rw-r--r--  .venv/lib/python3.12/site-packages/openai/_utils/_sync.py         86
-rw-r--r--  .venv/lib/python3.12/site-packages/openai/_utils/_transform.py   402
-rw-r--r--  .venv/lib/python3.12/site-packages/openai/_utils/_typing.py      149
-rw-r--r--  .venv/lib/python3.12/site-packages/openai/_utils/_utils.py       430
9 files changed, 1288 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/openai/_utils/__init__.py b/.venv/lib/python3.12/site-packages/openai/_utils/__init__.py
new file mode 100644
index 00000000..bd01c088
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/openai/_utils/__init__.py
@@ -0,0 +1,60 @@
+from ._logs import SensitiveHeadersFilter as SensitiveHeadersFilter
+from ._sync import asyncify as asyncify
+from ._proxy import LazyProxy as LazyProxy
+from ._utils import (
+ flatten as flatten,
+ is_dict as is_dict,
+ is_list as is_list,
+ is_given as is_given,
+ is_tuple as is_tuple,
+ json_safe as json_safe,
+ lru_cache as lru_cache,
+ is_mapping as is_mapping,
+ is_tuple_t as is_tuple_t,
+ parse_date as parse_date,
+ is_iterable as is_iterable,
+ is_sequence as is_sequence,
+ coerce_float as coerce_float,
+ is_mapping_t as is_mapping_t,
+ removeprefix as removeprefix,
+ removesuffix as removesuffix,
+ extract_files as extract_files,
+ is_sequence_t as is_sequence_t,
+ required_args as required_args,
+ coerce_boolean as coerce_boolean,
+ coerce_integer as coerce_integer,
+ file_from_path as file_from_path,
+ parse_datetime as parse_datetime,
+ is_azure_client as is_azure_client,
+ strip_not_given as strip_not_given,
+ deepcopy_minimal as deepcopy_minimal,
+ get_async_library as get_async_library,
+ maybe_coerce_float as maybe_coerce_float,
+ get_required_header as get_required_header,
+ maybe_coerce_boolean as maybe_coerce_boolean,
+ maybe_coerce_integer as maybe_coerce_integer,
+ is_async_azure_client as is_async_azure_client,
+)
+from ._typing import (
+ is_list_type as is_list_type,
+ is_union_type as is_union_type,
+ extract_type_arg as extract_type_arg,
+ is_iterable_type as is_iterable_type,
+ is_required_type as is_required_type,
+ is_annotated_type as is_annotated_type,
+ is_type_alias_type as is_type_alias_type,
+ strip_annotated_type as strip_annotated_type,
+ extract_type_var_from_base as extract_type_var_from_base,
+)
+from ._streams import consume_sync_iterator as consume_sync_iterator, consume_async_iterator as consume_async_iterator
+from ._transform import (
+ PropertyInfo as PropertyInfo,
+ transform as transform,
+ async_transform as async_transform,
+ maybe_transform as maybe_transform,
+ async_maybe_transform as async_maybe_transform,
+)
+from ._reflection import (
+ function_has_argument as function_has_argument,
+ assert_signatures_in_sync as assert_signatures_in_sync,
+)
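Aside: the redundant `name as name` aliases above mark each symbol as an explicit re-export, which strict type checkers such as pyright otherwise flag. A minimal downstream usage sketch (the call sites are invented; the names come from the re-export list above):

```py
from openai._utils import flatten, is_mapping

assert is_mapping({"a": 1})  # narrowed to Mapping[str, object]
assert flatten([[1, 2], [3]]) == [1, 2, 3]
```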
diff --git a/.venv/lib/python3.12/site-packages/openai/_utils/_logs.py b/.venv/lib/python3.12/site-packages/openai/_utils/_logs.py
new file mode 100644
index 00000000..37694693
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/openai/_utils/_logs.py
@@ -0,0 +1,42 @@
+import os
+import logging
+from typing_extensions import override
+
+from ._utils import is_dict
+
+logger: logging.Logger = logging.getLogger("openai")
+httpx_logger: logging.Logger = logging.getLogger("httpx")
+
+
+SENSITIVE_HEADERS = {"api-key", "authorization"}
+
+
+def _basic_config() -> None:
+ # e.g. [2023-10-05 14:12:26 - openai._base_client:818 - DEBUG] HTTP Request: POST http://127.0.0.1:4010/foo/bar "200 OK"
+ logging.basicConfig(
+ format="[%(asctime)s - %(name)s:%(lineno)d - %(levelname)s] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+ )
+
+
+def setup_logging() -> None:
+ env = os.environ.get("OPENAI_LOG")
+ if env == "debug":
+ _basic_config()
+ logger.setLevel(logging.DEBUG)
+ httpx_logger.setLevel(logging.DEBUG)
+ elif env == "info":
+ _basic_config()
+ logger.setLevel(logging.INFO)
+ httpx_logger.setLevel(logging.INFO)
+
+
+class SensitiveHeadersFilter(logging.Filter):
+ @override
+ def filter(self, record: logging.LogRecord) -> bool:
+ if is_dict(record.args) and "headers" in record.args and is_dict(record.args["headers"]):
+ headers = record.args["headers"] = {**record.args["headers"]}
+ for header in headers:
+ if str(header).lower() in SENSITIVE_HEADERS:
+ headers[header] = "<redacted>"
+ return True
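A minimal usage sketch for the filter above (the logger setup is invented for illustration). Python's `logging` stores a lone mapping argument directly in `record.args`, which is exactly the shape the filter inspects:

```py
import logging

from openai._utils import SensitiveHeadersFilter

handler = logging.StreamHandler()
handler.addFilter(SensitiveHeadersFilter())

log = logging.getLogger("example")  # hypothetical logger, not the SDK's
log.addHandler(handler)
log.setLevel(logging.DEBUG)

# The single dict becomes record.args, so the filter copies the inner
# headers dict and redacts sensitive values before formatting.
log.debug("HTTP request options: %s", {"headers": {"Authorization": "Bearer sk-..."}})
# -> HTTP request options: {'headers': {'Authorization': '<redacted>'}}
```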
diff --git a/.venv/lib/python3.12/site-packages/openai/_utils/_proxy.py b/.venv/lib/python3.12/site-packages/openai/_utils/_proxy.py
new file mode 100644
index 00000000..ffd883e9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/openai/_utils/_proxy.py
@@ -0,0 +1,62 @@
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+from typing import Generic, TypeVar, Iterable, cast
+from typing_extensions import override
+
+T = TypeVar("T")
+
+
+class LazyProxy(Generic[T], ABC):
+ """Implements data methods to pretend that an instance is another instance.
+
+ This includes forwarding attribute access and other methods.
+ """
+
+ # Note: we have to special case proxies that themselves return proxies
+ # to support using a proxy as a catch-all for any random access, e.g. `proxy.foo.bar.baz`
+
+ def __getattr__(self, attr: str) -> object:
+ proxied = self.__get_proxied__()
+ if isinstance(proxied, LazyProxy):
+ return proxied # pyright: ignore
+ return getattr(proxied, attr)
+
+ @override
+ def __repr__(self) -> str:
+ proxied = self.__get_proxied__()
+ if isinstance(proxied, LazyProxy):
+ return proxied.__class__.__name__
+ return repr(self.__get_proxied__())
+
+ @override
+ def __str__(self) -> str:
+ proxied = self.__get_proxied__()
+ if isinstance(proxied, LazyProxy):
+ return proxied.__class__.__name__
+ return str(proxied)
+
+ @override
+ def __dir__(self) -> Iterable[str]:
+ proxied = self.__get_proxied__()
+ if isinstance(proxied, LazyProxy):
+ return []
+ return proxied.__dir__()
+
+ @property # type: ignore
+ @override
+ def __class__(self) -> type: # pyright: ignore
+ proxied = self.__get_proxied__()
+ if issubclass(type(proxied), LazyProxy):
+ return type(proxied)
+ return proxied.__class__
+
+ def __get_proxied__(self) -> T:
+ return self.__load__()
+
+ def __as_proxied__(self) -> T:
+ """Helper method that returns the current proxy, typed as the loaded object"""
+ return cast(T, self)
+
+ @abstractmethod
+ def __load__(self) -> T: ...
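A minimal sketch of the intended subclassing pattern (the `Config` classes are invented). Note that `__get_proxied__` calls `__load__` on every access, so a real subclass should keep `__load__` cheap or memoize it:

```py
from openai._utils import LazyProxy


class Config:
    debug = True


class LazyConfig(LazyProxy[Config]):
    def __load__(self) -> Config:
        # construction is deferred until the first attribute access
        return Config()


config = LazyConfig().__as_proxied__()  # statically typed as Config
print(config.debug)                # attribute access is forwarded -> True
print(isinstance(config, Config))  # True, via the __class__ override above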
diff --git a/.venv/lib/python3.12/site-packages/openai/_utils/_reflection.py b/.venv/lib/python3.12/site-packages/openai/_utils/_reflection.py
new file mode 100644
index 00000000..bdaca29e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/openai/_utils/_reflection.py
@@ -0,0 +1,45 @@
+from __future__ import annotations
+
+import inspect
+from typing import Any, Callable
+
+
+def function_has_argument(func: Callable[..., Any], arg_name: str) -> bool:
+ """Returns whether or not the given function has a specific parameter"""
+ sig = inspect.signature(func)
+ return arg_name in sig.parameters
+
+
+def assert_signatures_in_sync(
+ source_func: Callable[..., Any],
+ check_func: Callable[..., Any],
+ *,
+ exclude_params: set[str] = set(),
+ description: str = "",
+) -> None:
+ """Ensure that the signature of the second function matches the first."""
+
+ check_sig = inspect.signature(check_func)
+ source_sig = inspect.signature(source_func)
+
+ errors: list[str] = []
+
+ for name, source_param in source_sig.parameters.items():
+ if name in exclude_params:
+ continue
+
+ custom_param = check_sig.parameters.get(name)
+ if not custom_param:
+ errors.append(f"the `{name}` param is missing")
+ continue
+
+ if custom_param.annotation != source_param.annotation:
+ errors.append(
+ f"types for the `{name}` param are do not match; source={repr(source_param.annotation)} checking={repr(custom_param.annotation)}"
+ )
+ continue
+
+ if errors:
+ raise AssertionError(
+ f"{len(errors)} errors encountered when comparing signatures{description}:\n\n" + "\n\n".join(errors)
+ )
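A usage sketch with two invented functions; the helper raises when the second signature drifts from the first:

```py
from openai._utils import assert_signatures_in_sync


def create(*, model: str, prompt: str) -> str: ...


def create_wrapper(*, model: str) -> str: ...


assert_signatures_in_sync(create, create_wrapper)
# AssertionError: 1 errors encountered when comparing signatures:
#
# the `prompt` param is missing
```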
diff --git a/.venv/lib/python3.12/site-packages/openai/_utils/_streams.py b/.venv/lib/python3.12/site-packages/openai/_utils/_streams.py
new file mode 100644
index 00000000..f4a0208f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/openai/_utils/_streams.py
@@ -0,0 +1,12 @@
+from typing import Any
+from typing_extensions import Iterator, AsyncIterator
+
+
+def consume_sync_iterator(iterator: Iterator[Any]) -> None:
+ for _ in iterator:
+ ...
+
+
+async def consume_async_iterator(iterator: AsyncIterator[Any]) -> None:
+ async for _ in iterator:
+ ...
diff --git a/.venv/lib/python3.12/site-packages/openai/_utils/_sync.py b/.venv/lib/python3.12/site-packages/openai/_utils/_sync.py
new file mode 100644
index 00000000..ad7ec71b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/openai/_utils/_sync.py
@@ -0,0 +1,86 @@
+from __future__ import annotations
+
+import sys
+import asyncio
+import functools
+import contextvars
+from typing import Any, TypeVar, Callable, Awaitable
+from typing_extensions import ParamSpec
+
+import anyio
+import sniffio
+import anyio.to_thread
+
+T_Retval = TypeVar("T_Retval")
+T_ParamSpec = ParamSpec("T_ParamSpec")
+
+
+if sys.version_info >= (3, 9):
+ _asyncio_to_thread = asyncio.to_thread
+else:
+ # backport of https://docs.python.org/3/library/asyncio-task.html#asyncio.to_thread
+ # for Python 3.8 support
+ async def _asyncio_to_thread(
+ func: Callable[T_ParamSpec, T_Retval], /, *args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs
+ ) -> Any:
+ """Asynchronously run function *func* in a separate thread.
+
+ Any *args and **kwargs supplied for this function are directly passed
+ to *func*. Also, the current :class:`contextvars.Context` is propagated,
+ allowing context variables from the main thread to be accessed in the
+ separate thread.
+
+ Returns a coroutine that can be awaited to get the eventual result of *func*.
+ """
+ loop = asyncio.events.get_running_loop()
+ ctx = contextvars.copy_context()
+ func_call = functools.partial(ctx.run, func, *args, **kwargs)
+ return await loop.run_in_executor(None, func_call)
+
+
+async def to_thread(
+ func: Callable[T_ParamSpec, T_Retval], /, *args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs
+) -> T_Retval:
+ if sniffio.current_async_library() == "asyncio":
+ return await _asyncio_to_thread(func, *args, **kwargs)
+
+ return await anyio.to_thread.run_sync(
+ functools.partial(func, *args, **kwargs),
+ )
+
+
+# inspired by `asyncer`, https://github.com/tiangolo/asyncer
+def asyncify(function: Callable[T_ParamSpec, T_Retval]) -> Callable[T_ParamSpec, Awaitable[T_Retval]]:
+ """
+ Take a blocking function and create an async one that receives the same
+ positional and keyword arguments. For Python 3.9 and above, it uses
+ asyncio.to_thread to run the function in a separate thread. For Python 3.8,
+ it uses a locally defined copy of the asyncio.to_thread function, which was
+ introduced in Python 3.9.
+
+ Usage:
+
+ ```python
+ def blocking_func(arg1, arg2, kwarg1=None):
+ # blocking code
+ return result
+
+
+ result = await asyncify(blocking_func)(arg1, arg2, kwarg1=value1)
+ ```
+
+ ## Arguments
+
+ `function`: a blocking regular callable (e.g. a function)
+
+ ## Return
+
+ An async function that takes the same positional and keyword arguments as the
+ original one, that when called runs the same original function in a thread worker
+ and returns the result.
+ """
+
+ async def wrapper(*args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs) -> T_Retval:
+ return await to_thread(function, *args, **kwargs)
+
+ return wrapper
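A runnable sketch of `asyncify` (the blocking function is invented):

```py
import time
import asyncio

from openai._utils import asyncify


def slow_add(a: int, b: int) -> int:
    time.sleep(0.1)  # stands in for blocking I/O
    return a + b


async def main() -> None:
    # slow_add runs in a worker thread; the event loop stays responsive
    result = await asyncify(slow_add)(1, 2)
    print(result)  # 3


asyncio.run(main())
```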
diff --git a/.venv/lib/python3.12/site-packages/openai/_utils/_transform.py b/.venv/lib/python3.12/site-packages/openai/_utils/_transform.py
new file mode 100644
index 00000000..18afd9d8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/openai/_utils/_transform.py
@@ -0,0 +1,402 @@
+from __future__ import annotations
+
+import io
+import base64
+import pathlib
+from typing import Any, Mapping, TypeVar, cast
+from datetime import date, datetime
+from typing_extensions import Literal, get_args, override, get_type_hints
+
+import anyio
+import pydantic
+
+from ._utils import (
+ is_list,
+ is_mapping,
+ is_iterable,
+)
+from .._files import is_base64_file_input
+from ._typing import (
+ is_list_type,
+ is_union_type,
+ extract_type_arg,
+ is_iterable_type,
+ is_required_type,
+ is_annotated_type,
+ strip_annotated_type,
+)
+from .._compat import get_origin, model_dump, is_typeddict
+
+_T = TypeVar("_T")
+
+
+# TODO: support for drilling globals() and locals()
+# TODO: ensure works correctly with forward references in all cases
+
+
+PropertyFormat = Literal["iso8601", "base64", "custom"]
+
+
+class PropertyInfo:
+ """Metadata class to be used in Annotated types to provide information about a given type.
+
+ For example:
+
+ class MyParams(TypedDict):
+ account_holder_name: Annotated[str, PropertyInfo(alias='accountHolderName')]
+
+ This means that {'account_holder_name': 'Robert'} will be transformed to {'accountHolderName': 'Robert'} before being sent to the API.
+ """
+
+ alias: str | None
+ format: PropertyFormat | None
+ format_template: str | None
+ discriminator: str | None
+
+ def __init__(
+ self,
+ *,
+ alias: str | None = None,
+ format: PropertyFormat | None = None,
+ format_template: str | None = None,
+ discriminator: str | None = None,
+ ) -> None:
+ self.alias = alias
+ self.format = format
+ self.format_template = format_template
+ self.discriminator = discriminator
+
+ @override
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}(alias='{self.alias}', format={self.format}, format_template='{self.format_template}', discriminator='{self.discriminator}')"
+
+
+def maybe_transform(
+ data: object,
+ expected_type: object,
+) -> Any | None:
+ """Wrapper over `transform()` that allows `None` to be passed.
+
+ See `transform()` for more details.
+ """
+ if data is None:
+ return None
+ return transform(data, expected_type)
+
+
+# Wrapper over _transform_recursive providing fake types
+def transform(
+ data: _T,
+ expected_type: object,
+) -> _T:
+ """Transform dictionaries based off of type information from the given type, for example:
+
+ ```py
+ class Params(TypedDict, total=False):
+ card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]]
+
+
+ transformed = transform({"card_id": "<my card ID>"}, Params)
+ # {'cardID': '<my card ID>'}
+ ```
+
+ Any keys / data that do not have type information given will be included as-is.
+
+ It should be noted that the transformations that this function does are not represented in the type system.
+ """
+ transformed = _transform_recursive(data, annotation=cast(type, expected_type))
+ return cast(_T, transformed)
+
+
+def _get_annotated_type(type_: type) -> type | None:
+ """If the given type is an `Annotated` type then it is returned, if not `None` is returned.
+
+ This also unwraps the type when applicable, e.g. `Required[Annotated[T, ...]]`
+ """
+ if is_required_type(type_):
+ # Unwrap `Required[Annotated[T, ...]]` to `Annotated[T, ...]`
+ type_ = get_args(type_)[0]
+
+ if is_annotated_type(type_):
+ return type_
+
+ return None
+
+
+def _maybe_transform_key(key: str, type_: type) -> str:
+ """Transform the given `data` based on the annotations provided in `type_`.
+
+ Note: this function only looks at `Annotated` types that contain `PropertyInfo` metadata.
+ """
+ annotated_type = _get_annotated_type(type_)
+ if annotated_type is None:
+ # no `Annotated` definition for this type, no transformation needed
+ return key
+
+ # ignore the first argument as it is the actual type
+ annotations = get_args(annotated_type)[1:]
+ for annotation in annotations:
+ if isinstance(annotation, PropertyInfo) and annotation.alias is not None:
+ return annotation.alias
+
+ return key
+
+
+def _transform_recursive(
+ data: object,
+ *,
+ annotation: type,
+ inner_type: type | None = None,
+) -> object:
+ """Transform the given data against the expected type.
+
+ Args:
+ annotation: The direct type annotation given to the particular piece of data.
+ This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc
+
+ inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type
+ is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in
+ the list can be transformed using the metadata from the container type.
+
+ Defaults to the same value as the `annotation` argument.
+ """
+ if inner_type is None:
+ inner_type = annotation
+
+ stripped_type = strip_annotated_type(inner_type)
+ origin = get_origin(stripped_type) or stripped_type
+ if is_typeddict(stripped_type) and is_mapping(data):
+ return _transform_typeddict(data, stripped_type)
+
+ if origin == dict and is_mapping(data):
+ items_type = get_args(stripped_type)[1]
+ return {key: _transform_recursive(value, annotation=items_type) for key, value in data.items()}
+
+ if (
+ # List[T]
+ (is_list_type(stripped_type) and is_list(data))
+ # Iterable[T]
+ or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str))
+ ):
+ # dicts are technically iterable, but iterating one only yields its keys,
+ # which is not usually the intent here, so we don't transform it.
+ if isinstance(data, dict):
+ return cast(object, data)
+
+ inner_type = extract_type_arg(stripped_type, 0)
+ return [_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data]
+
+ if is_union_type(stripped_type):
+ # For union types we run the transformation against all subtypes to ensure that everything is transformed.
+ #
+ # TODO: there may be edge cases where the same normalized field name will transform to two different names
+ # in different subtypes.
+ for subtype in get_args(stripped_type):
+ data = _transform_recursive(data, annotation=annotation, inner_type=subtype)
+ return data
+
+ if isinstance(data, pydantic.BaseModel):
+ return model_dump(data, exclude_unset=True, mode="json")
+
+ annotated_type = _get_annotated_type(annotation)
+ if annotated_type is None:
+ return data
+
+ # ignore the first argument as it is the actual type
+ annotations = get_args(annotated_type)[1:]
+ for annotation in annotations:
+ if isinstance(annotation, PropertyInfo) and annotation.format is not None:
+ return _format_data(data, annotation.format, annotation.format_template)
+
+ return data
+
+
+def _format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object:
+ if isinstance(data, (date, datetime)):
+ if format_ == "iso8601":
+ return data.isoformat()
+
+ if format_ == "custom" and format_template is not None:
+ return data.strftime(format_template)
+
+ if format_ == "base64" and is_base64_file_input(data):
+ binary: str | bytes | None = None
+
+ if isinstance(data, pathlib.Path):
+ binary = data.read_bytes()
+ elif isinstance(data, io.IOBase):
+ binary = data.read()
+
+ if isinstance(binary, str): # type: ignore[unreachable]
+ binary = binary.encode()
+
+ if not isinstance(binary, bytes):
+ raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}")
+
+ return base64.b64encode(binary).decode("ascii")
+
+ return data
+
+
+def _transform_typeddict(
+ data: Mapping[str, object],
+ expected_type: type,
+) -> Mapping[str, object]:
+ result: dict[str, object] = {}
+ annotations = get_type_hints(expected_type, include_extras=True)
+ for key, value in data.items():
+ type_ = annotations.get(key)
+ if type_ is None:
+ # we do not have a type annotation for this field, leave it as is
+ result[key] = value
+ else:
+ result[_maybe_transform_key(key, type_)] = _transform_recursive(value, annotation=type_)
+ return result
+
+
+async def async_maybe_transform(
+ data: object,
+ expected_type: object,
+) -> Any | None:
+ """Wrapper over `async_transform()` that allows `None` to be passed.
+
+ See `async_transform()` for more details.
+ """
+ if data is None:
+ return None
+ return await async_transform(data, expected_type)
+
+
+async def async_transform(
+ data: _T,
+ expected_type: object,
+) -> _T:
+ """Transform dictionaries based off of type information from the given type, for example:
+
+ ```py
+ class Params(TypedDict, total=False):
+ card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]]
+
+
+ transformed = await async_transform({"card_id": "<my card ID>"}, Params)
+ # {'cardID': '<my card ID>'}
+ ```
+
+ Any keys / data that do not have type information given will be included as-is.
+
+ It should be noted that the transformations that this function does are not represented in the type system.
+ """
+ transformed = await _async_transform_recursive(data, annotation=cast(type, expected_type))
+ return cast(_T, transformed)
+
+
+async def _async_transform_recursive(
+ data: object,
+ *,
+ annotation: type,
+ inner_type: type | None = None,
+) -> object:
+ """Transform the given data against the expected type.
+
+ Args:
+ annotation: The direct type annotation given to the particular piece of data.
+ This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc
+
+ inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type
+ is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in
+ the list can be transformed using the metadata from the container type.
+
+ Defaults to the same value as the `annotation` argument.
+ """
+ if inner_type is None:
+ inner_type = annotation
+
+ stripped_type = strip_annotated_type(inner_type)
+ origin = get_origin(stripped_type) or stripped_type
+ if is_typeddict(stripped_type) and is_mapping(data):
+ return await _async_transform_typeddict(data, stripped_type)
+
+ if origin == dict and is_mapping(data):
+ items_type = get_args(stripped_type)[1]
+ return {key: await _async_transform_recursive(value, annotation=items_type) for key, value in data.items()}
+
+ if (
+ # List[T]
+ (is_list_type(stripped_type) and is_list(data))
+ # Iterable[T]
+ or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str))
+ ):
+ # dicts are technically iterable, but iterating one only yields its keys,
+ # which is not usually the intent here, so we don't transform it.
+ if isinstance(data, dict):
+ return cast(object, data)
+
+ inner_type = extract_type_arg(stripped_type, 0)
+ return [await _async_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data]
+
+ if is_union_type(stripped_type):
+ # For union types we run the transformation against all subtypes to ensure that everything is transformed.
+ #
+ # TODO: there may be edge cases where the same normalized field name will transform to two different names
+ # in different subtypes.
+ for subtype in get_args(stripped_type):
+ data = await _async_transform_recursive(data, annotation=annotation, inner_type=subtype)
+ return data
+
+ if isinstance(data, pydantic.BaseModel):
+ return model_dump(data, exclude_unset=True, mode="json")
+
+ annotated_type = _get_annotated_type(annotation)
+ if annotated_type is None:
+ return data
+
+ # ignore the first argument as it is the actual type
+ annotations = get_args(annotated_type)[1:]
+ for annotation in annotations:
+ if isinstance(annotation, PropertyInfo) and annotation.format is not None:
+ return await _async_format_data(data, annotation.format, annotation.format_template)
+
+ return data
+
+
+async def _async_format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object:
+ if isinstance(data, (date, datetime)):
+ if format_ == "iso8601":
+ return data.isoformat()
+
+ if format_ == "custom" and format_template is not None:
+ return data.strftime(format_template)
+
+ if format_ == "base64" and is_base64_file_input(data):
+ binary: str | bytes | None = None
+
+ if isinstance(data, pathlib.Path):
+ binary = await anyio.Path(data).read_bytes()
+ elif isinstance(data, io.IOBase):
+ binary = data.read()
+
+ if isinstance(binary, str): # type: ignore[unreachable]
+ binary = binary.encode()
+
+ if not isinstance(binary, bytes):
+ raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}")
+
+ return base64.b64encode(binary).decode("ascii")
+
+ return data
+
+
+async def _async_transform_typeddict(
+ data: Mapping[str, object],
+ expected_type: type,
+) -> Mapping[str, object]:
+ result: dict[str, object] = {}
+ annotations = get_type_hints(expected_type, include_extras=True)
+ for key, value in data.items():
+ type_ = annotations.get(key)
+ if type_ is None:
+ # we do not have a type annotation for this field, leave it as is
+ result[key] = value
+ else:
+ result[_maybe_transform_key(key, type_)] = await _async_transform_recursive(value, annotation=type_)
+ return result
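A sketch combining the alias and format metadata handled above (the `EventParams` class is invented):

```py
from datetime import datetime

from typing_extensions import Required, Annotated, TypedDict

from openai._utils import PropertyInfo, transform


class EventParams(TypedDict, total=False):
    starts_at: Required[Annotated[datetime, PropertyInfo(alias="startsAt", format="iso8601")]]


print(transform({"starts_at": datetime(2025, 1, 1)}, EventParams))
# {'startsAt': '2025-01-01T00:00:00'}
```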
diff --git a/.venv/lib/python3.12/site-packages/openai/_utils/_typing.py b/.venv/lib/python3.12/site-packages/openai/_utils/_typing.py
new file mode 100644
index 00000000..278749b1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/openai/_utils/_typing.py
@@ -0,0 +1,149 @@
+from __future__ import annotations
+
+import sys
+import typing
+import typing_extensions
+from typing import Any, TypeVar, Iterable, cast
+from collections import abc as _c_abc
+from typing_extensions import (
+ TypeIs,
+ Required,
+ Annotated,
+ get_args,
+ get_origin,
+)
+
+from .._types import InheritsGeneric
+from .._compat import is_union as _is_union
+
+
+def is_annotated_type(typ: type) -> bool:
+ return get_origin(typ) == Annotated
+
+
+def is_list_type(typ: type) -> bool:
+ return (get_origin(typ) or typ) == list
+
+
+def is_iterable_type(typ: type) -> bool:
+ """If the given type is `typing.Iterable[T]`"""
+ origin = get_origin(typ) or typ
+ return origin == Iterable or origin == _c_abc.Iterable
+
+
+def is_union_type(typ: type) -> bool:
+ return _is_union(get_origin(typ))
+
+
+def is_required_type(typ: type) -> bool:
+ return get_origin(typ) == Required
+
+
+def is_typevar(typ: type) -> bool:
+ # type ignore is required because type checkers
+ # think this expression will always return False
+ return type(typ) == TypeVar # type: ignore
+
+
+_TYPE_ALIAS_TYPES: tuple[type[typing_extensions.TypeAliasType], ...] = (typing_extensions.TypeAliasType,)
+if sys.version_info >= (3, 12):
+ _TYPE_ALIAS_TYPES = (*_TYPE_ALIAS_TYPES, typing.TypeAliasType)
+
+
+def is_type_alias_type(tp: Any, /) -> TypeIs[typing_extensions.TypeAliasType]:
+ """Return whether the provided argument is an instance of `TypeAliasType`.
+
+ ```python
+ type Int = int
+ is_type_alias_type(Int)
+ # > True
+ Str = TypeAliasType("Str", str)
+ is_type_alias_type(Str)
+ # > True
+ ```
+ """
+ return isinstance(tp, _TYPE_ALIAS_TYPES)
+
+
+# Extracts T from Annotated[T, ...] or from Required[Annotated[T, ...]]
+def strip_annotated_type(typ: type) -> type:
+ if is_required_type(typ) or is_annotated_type(typ):
+ return strip_annotated_type(cast(type, get_args(typ)[0]))
+
+ return typ
+
+
+def extract_type_arg(typ: type, index: int) -> type:
+ args = get_args(typ)
+ try:
+ return cast(type, args[index])
+ except IndexError as err:
+ raise RuntimeError(f"Expected type {typ} to have a type argument at index {index} but it did not") from err
+
+
+def extract_type_var_from_base(
+ typ: type,
+ *,
+ generic_bases: tuple[type, ...],
+ index: int,
+ failure_message: str | None = None,
+) -> type:
+ """Given a type like `Foo[T]`, returns the generic type variable `T`.
+
+ This also handles the case where a concrete subclass is given, e.g.
+ ```py
+ class MyResponse(Foo[bytes]):
+ ...
+
+ extract_type_var_from_base(MyResponse, generic_bases=(Foo,), index=0) -> bytes
+ ```
+
+ And where a generic subclass is given:
+ ```py
+ _T = TypeVar('_T')
+ class MyResponse(Foo[_T]):
+ ...
+
+ extract_type_var_from_base(MyResponse[bytes], generic_bases=(Foo,), index=0) -> bytes
+ ```
+ """
+ cls = cast(object, get_origin(typ) or typ)
+ if cls in generic_bases:
+ # we're given the class directly
+ return extract_type_arg(typ, index)
+
+ # if a subclass is given
+ # ---
+ # this is needed as __orig_bases__ is not present in the typeshed stubs
+ # because it is intended to be for internal use only, however there does
+ # not seem to be a way to resolve generic TypeVars for inherited subclasses
+ # without using it.
+ if isinstance(cls, InheritsGeneric):
+ target_base_class: Any | None = None
+ for base in cls.__orig_bases__:
+ if base.__origin__ in generic_bases:
+ target_base_class = base
+ break
+
+ if target_base_class is None:
+ raise RuntimeError(
+ "Could not find the generic base class;\n"
+ "This should never happen;\n"
+ f"Does {cls} inherit from one of {generic_bases} ?"
+ )
+
+ extracted = extract_type_arg(target_base_class, index)
+ if is_typevar(extracted):
+ # If the extracted type argument is itself a type variable
+ # then that means the subclass itself is generic, so we have
+ # to resolve the type argument from the class itself, not
+ # the base class.
+ #
+ # Note: if there is more than 1 type argument, the subclass could
+ # change the ordering of the type arguments, this is not currently
+ # supported.
+ return extract_type_arg(typ, index)
+
+ return extracted
+
+ raise RuntimeError(failure_message or f"Could not resolve inner type variable at index {index} for {typ}")
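A sketch of the concrete-subclass case from the `extract_type_var_from_base` docstring (the classes are invented):

```py
from typing import Generic, TypeVar

from openai._utils import extract_type_var_from_base

T = TypeVar("T")


class Response(Generic[T]): ...


class BytesResponse(Response[bytes]): ...


print(extract_type_var_from_base(BytesResponse, generic_bases=(Response,), index=0))
# <class 'bytes'>
```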
diff --git a/.venv/lib/python3.12/site-packages/openai/_utils/_utils.py b/.venv/lib/python3.12/site-packages/openai/_utils/_utils.py
new file mode 100644
index 00000000..d6734e6b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/openai/_utils/_utils.py
@@ -0,0 +1,430 @@
+from __future__ import annotations
+
+import os
+import re
+import inspect
+import functools
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Tuple,
+ Mapping,
+ TypeVar,
+ Callable,
+ Iterable,
+ Sequence,
+ cast,
+ overload,
+)
+from pathlib import Path
+from datetime import date, datetime
+from typing_extensions import TypeGuard
+
+import sniffio
+
+from .._types import NotGiven, FileTypes, NotGivenOr, HeadersLike
+from .._compat import parse_date as parse_date, parse_datetime as parse_datetime
+
+_T = TypeVar("_T")
+_TupleT = TypeVar("_TupleT", bound=Tuple[object, ...])
+_MappingT = TypeVar("_MappingT", bound=Mapping[str, object])
+_SequenceT = TypeVar("_SequenceT", bound=Sequence[object])
+CallableT = TypeVar("CallableT", bound=Callable[..., Any])
+
+if TYPE_CHECKING:
+ from ..lib.azure import AzureOpenAI, AsyncAzureOpenAI
+
+
+def flatten(t: Iterable[Iterable[_T]]) -> list[_T]:
+ return [item for sublist in t for item in sublist]
+
+
+def extract_files(
+ # TODO: this needs to take Dict but variance issues.....
+ # create protocol type ?
+ query: Mapping[str, object],
+ *,
+ paths: Sequence[Sequence[str]],
+) -> list[tuple[str, FileTypes]]:
+ """Recursively extract files from the given dictionary based on specified paths.
+
+ A path may look like this ['foo', 'files', '<array>', 'data'].
+
+ Note: this mutates the given dictionary.
+ """
+ files: list[tuple[str, FileTypes]] = []
+ for path in paths:
+ files.extend(_extract_items(query, path, index=0, flattened_key=None))
+ return files
+
+
+def _extract_items(
+ obj: object,
+ path: Sequence[str],
+ *,
+ index: int,
+ flattened_key: str | None,
+) -> list[tuple[str, FileTypes]]:
+ try:
+ key = path[index]
+ except IndexError:
+ if isinstance(obj, NotGiven):
+ # no value was provided - we can safely ignore
+ return []
+
+ # cyclical import
+ from .._files import assert_is_file_content
+
+ # We have exhausted the path, return the entry we found.
+ assert_is_file_content(obj, key=flattened_key)
+ assert flattened_key is not None
+ return [(flattened_key, cast(FileTypes, obj))]
+
+ index += 1
+ if is_dict(obj):
+ try:
+ # We are at the last entry in the path so we must remove the field
+ if len(path) == index:
+ item = obj.pop(key)
+ else:
+ item = obj[key]
+ except KeyError:
+ # Key was not present in the dictionary, this is not indicative of an error
+ # as the given path may not point to a required field. We also do not want
+ # to enforce required fields as the API may differ from the spec in some cases.
+ return []
+ if flattened_key is None:
+ flattened_key = key
+ else:
+ flattened_key += f"[{key}]"
+ return _extract_items(
+ item,
+ path,
+ index=index,
+ flattened_key=flattened_key,
+ )
+ elif is_list(obj):
+ if key != "<array>":
+ return []
+
+ return flatten(
+ [
+ _extract_items(
+ item,
+ path,
+ index=index,
+ flattened_key=flattened_key + "[]" if flattened_key is not None else "[]",
+ )
+ for item in obj
+ ]
+ )
+
+ # Something unexpected was passed, just ignore it.
+ return []
+
+
+def is_given(obj: NotGivenOr[_T]) -> TypeGuard[_T]:
+ return not isinstance(obj, NotGiven)
+
+
+# Type safe methods for narrowing types with TypeVars.
+# The default narrowing for isinstance(obj, dict) is dict[unknown, unknown],
+# however this causes Pyright to rightfully report errors. As we know we don't
+# care about the contained types we can safely use `object` in its place.
+#
+# There are two separate functions defined, `is_*` and `is_*_t` for different use cases.
+# `is_*` is for when you're dealing with an unknown input
+# `is_*_t` is for when you're narrowing a known union type to a specific subset
+
+
+def is_tuple(obj: object) -> TypeGuard[tuple[object, ...]]:
+ return isinstance(obj, tuple)
+
+
+def is_tuple_t(obj: _TupleT | object) -> TypeGuard[_TupleT]:
+ return isinstance(obj, tuple)
+
+
+def is_sequence(obj: object) -> TypeGuard[Sequence[object]]:
+ return isinstance(obj, Sequence)
+
+
+def is_sequence_t(obj: _SequenceT | object) -> TypeGuard[_SequenceT]:
+ return isinstance(obj, Sequence)
+
+
+def is_mapping(obj: object) -> TypeGuard[Mapping[str, object]]:
+ return isinstance(obj, Mapping)
+
+
+def is_mapping_t(obj: _MappingT | object) -> TypeGuard[_MappingT]:
+ return isinstance(obj, Mapping)
+
+
+def is_dict(obj: object) -> TypeGuard[dict[object, object]]:
+ return isinstance(obj, dict)
+
+
+def is_list(obj: object) -> TypeGuard[list[object]]:
+ return isinstance(obj, list)
+
+
+def is_iterable(obj: object) -> TypeGuard[Iterable[object]]:
+ return isinstance(obj, Iterable)
+
+
+def deepcopy_minimal(item: _T) -> _T:
+ """Minimal reimplementation of copy.deepcopy() that will only copy certain object types:
+
+ - mappings, e.g. `dict`
+ - list
+
+ This is done for performance reasons.
+ """
+ if is_mapping(item):
+ return cast(_T, {k: deepcopy_minimal(v) for k, v in item.items()})
+ if is_list(item):
+ return cast(_T, [deepcopy_minimal(entry) for entry in item])
+ return item
+
+
+# copied from https://github.com/Rapptz/RoboDanny
+def human_join(seq: Sequence[str], *, delim: str = ", ", final: str = "or") -> str:
+ size = len(seq)
+ if size == 0:
+ return ""
+
+ if size == 1:
+ return seq[0]
+
+ if size == 2:
+ return f"{seq[0]} {final} {seq[1]}"
+
+ return delim.join(seq[:-1]) + f" {final} {seq[-1]}"
+
+
+def quote(string: str) -> str:
+ """Add single quotation marks around the given string. Does *not* do any escaping."""
+ return f"'{string}'"
+
+
+def required_args(*variants: Sequence[str]) -> Callable[[CallableT], CallableT]:
+ """Decorator to enforce a given set of arguments or variants of arguments are passed to the decorated function.
+
+ Useful for enforcing runtime validation of overloaded functions.
+
+ Example usage:
+ ```py
+ @overload
+ def foo(*, a: str) -> str: ...
+
+
+ @overload
+ def foo(*, b: bool) -> str: ...
+
+
+ # This enforces the same constraints that a static type checker would
+ # i.e. that either a or b must be passed to the function
+ @required_args(["a"], ["b"])
+ def foo(*, a: str | None = None, b: bool | None = None) -> str: ...
+ ```
+ """
+
+ def inner(func: CallableT) -> CallableT:
+ params = inspect.signature(func).parameters
+ positional = [
+ name
+ for name, param in params.items()
+ if param.kind
+ in {
+ param.POSITIONAL_ONLY,
+ param.POSITIONAL_OR_KEYWORD,
+ }
+ ]
+
+ @functools.wraps(func)
+ def wrapper(*args: object, **kwargs: object) -> object:
+ given_params: set[str] = set()
+ for i, _ in enumerate(args):
+ try:
+ given_params.add(positional[i])
+ except IndexError:
+ raise TypeError(
+ f"{func.__name__}() takes {len(positional)} argument(s) but {len(args)} were given"
+ ) from None
+
+ for key in kwargs.keys():
+ given_params.add(key)
+
+ for variant in variants:
+ matches = all((param in given_params for param in variant))
+ if matches:
+ break
+ else: # no break
+ if len(variants) > 1:
+ variations = human_join(
+ ["(" + human_join([quote(arg) for arg in variant], final="and") + ")" for variant in variants]
+ )
+ msg = f"Missing required arguments; Expected either {variations} arguments to be given"
+ else:
+ assert len(variants) > 0
+
+ # TODO: this error message is not deterministic
+ missing = list(set(variants[0]) - given_params)
+ if len(missing) > 1:
+ msg = f"Missing required arguments: {human_join([quote(arg) for arg in missing])}"
+ else:
+ msg = f"Missing required argument: {quote(missing[0])}"
+ raise TypeError(msg)
+ return func(*args, **kwargs)
+
+ return wrapper # type: ignore
+
+ return inner
+
+
+_K = TypeVar("_K")
+_V = TypeVar("_V")
+
+
+@overload
+def strip_not_given(obj: None) -> None: ...
+
+
+@overload
+def strip_not_given(obj: Mapping[_K, _V | NotGiven]) -> dict[_K, _V]: ...
+
+
+@overload
+def strip_not_given(obj: object) -> object: ...
+
+
+def strip_not_given(obj: object | None) -> object:
+ """Remove all top-level keys where their values are instances of `NotGiven`"""
+ if obj is None:
+ return None
+
+ if not is_mapping(obj):
+ return obj
+
+ return {key: value for key, value in obj.items() if not isinstance(value, NotGiven)}
+
+
+def coerce_integer(val: str) -> int:
+ return int(val, base=10)
+
+
+def coerce_float(val: str) -> float:
+ return float(val)
+
+
+def coerce_boolean(val: str) -> bool:
+ return val == "true" or val == "1" or val == "on"
+
+
+def maybe_coerce_integer(val: str | None) -> int | None:
+ if val is None:
+ return None
+ return coerce_integer(val)
+
+
+def maybe_coerce_float(val: str | None) -> float | None:
+ if val is None:
+ return None
+ return coerce_float(val)
+
+
+def maybe_coerce_boolean(val: str | None) -> bool | None:
+ if val is None:
+ return None
+ return coerce_boolean(val)
+
+
+def removeprefix(string: str, prefix: str) -> str:
+ """Remove a prefix from a string.
+
+ Backport of `str.removeprefix` for Python < 3.9
+ """
+ if string.startswith(prefix):
+ return string[len(prefix) :]
+ return string
+
+
+def removesuffix(string: str, suffix: str) -> str:
+ """Remove a suffix from a string.
+
+ Backport of `str.removesuffix` for Python < 3.9
+ """
+ if string.endswith(suffix):
+ return string[: -len(suffix)]
+ return string
+
+
+def file_from_path(path: str) -> FileTypes:
+ contents = Path(path).read_bytes()
+ file_name = os.path.basename(path)
+ return (file_name, contents)
+
+
+def get_required_header(headers: HeadersLike, header: str) -> str:
+ lower_header = header.lower()
+ if is_mapping_t(headers):
+ # mypy doesn't understand the type narrowing here
+ for k, v in headers.items(): # type: ignore
+ if k.lower() == lower_header and isinstance(v, str):
+ return v
+
+ # to deal with the case where the header looks like Stainless-Event-Id
+ intercaps_header = re.sub(r"([^\w])(\w)", lambda pat: pat.group(1) + pat.group(2).upper(), header.capitalize())
+
+ for normalized_header in [header, lower_header, header.upper(), intercaps_header]:
+ value = headers.get(normalized_header)
+ if value:
+ return value
+
+ raise ValueError(f"Could not find {header} header")
+
+
+def get_async_library() -> str:
+ try:
+ return sniffio.current_async_library()
+ except Exception:
+ return "false"
+
+
+def lru_cache(*, maxsize: int | None = 128) -> Callable[[CallableT], CallableT]:
+ """A version of functools.lru_cache that retains the type signature
+ for the wrapped function arguments.
+ """
+ wrapper = functools.lru_cache( # noqa: TID251
+ maxsize=maxsize,
+ )
+ return cast(Any, wrapper) # type: ignore[no-any-return]
+
+
+def json_safe(data: object) -> object:
+ """Translates a mapping / sequence recursively in the same fashion
+ as `pydantic` v2's `model_dump(mode="json")`.
+ """
+ if is_mapping(data):
+ return {json_safe(key): json_safe(value) for key, value in data.items()}
+
+ if is_iterable(data) and not isinstance(data, (str, bytes, bytearray)):
+ return [json_safe(item) for item in data]
+
+ if isinstance(data, (datetime, date)):
+ return data.isoformat()
+
+ return data
+
+
+def is_azure_client(client: object) -> TypeGuard[AzureOpenAI]:
+ from ..lib.azure import AzureOpenAI
+
+ return isinstance(client, AzureOpenAI)
+
+
+def is_async_azure_client(client: object) -> TypeGuard[AsyncAzureOpenAI]:
+ from ..lib.azure import AsyncAzureOpenAI
+
+ return isinstance(client, AsyncAzureOpenAI)
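Finally, a sketch of `extract_files` from `_utils.py` above (the params mapping is invented); note that it returns the flattened entries and removes them from the mapping in place:

```py
from openai._utils import extract_files

params = {"purpose": "fine-tune", "file": b"raw bytes"}
files = extract_files(params, paths=[["file"]])

print(files)   # [('file', b'raw bytes')]
print(params)  # {'purpose': 'fine-tune'} -- the file entry was popped
```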