path: root/.venv/lib/python3.12/site-packages/litellm/llms/azure/files/handler.py
author    S. Solomon Darnell  2025-03-28 21:52:21 -0500
committer S. Solomon Darnell  2025-03-28 21:52:21 -0500
commit    4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
tree      ee3dc5af3b6313e921cd920906356f5d4febc4ed  /.venv/lib/python3.12/site-packages/litellm/llms/azure/files/handler.py
parent    cc961e04ba734dd72309fb548a2f97d67d578813 (diff)
download  gn-ai-4a52a71956a8d46fcb7294ac71734504bb09bcc2.tar.gz
two versions of R2R are here  (HEAD, master)
Diffstat (limited to '.venv/lib/python3.12/site-packages/litellm/llms/azure/files/handler.py')
-rw-r--r--  .venv/lib/python3.12/site-packages/litellm/llms/azure/files/handler.py  284
1 file changed, 284 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/litellm/llms/azure/files/handler.py b/.venv/lib/python3.12/site-packages/litellm/llms/azure/files/handler.py
new file mode 100644
index 00000000..d45ac9a3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/litellm/llms/azure/files/handler.py
@@ -0,0 +1,284 @@
+from typing import Any, Coroutine, Optional, Union, cast
+
+import httpx
+from openai import AsyncAzureOpenAI, AzureOpenAI
+from openai.types.file_deleted import FileDeleted
+
+from litellm._logging import verbose_logger
+from litellm.types.llms.openai import *
+
+from ..common_utils import BaseAzureLLM
+
+
+class AzureOpenAIFilesAPI(BaseAzureLLM):
+ """
+    AzureOpenAI methods to support the files API, used for batches:
+ - create_file()
+ - retrieve_file()
+ - list_files()
+ - delete_file()
+ - file_content()
+ - update_file()
+ """
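+    # Every public sync method below follows the same pattern: resolve an
+    # AzureOpenAI / AsyncAzureOpenAI client via get_azure_openai_client(),
+    # delegate to the matching async coroutine when _is_async is True,
+    # otherwise call the synchronous OpenAI SDK client directly.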
+
+ def __init__(self) -> None:
+ super().__init__()
+
+ async def acreate_file(
+ self,
+ create_file_data: CreateFileRequest,
+ openai_client: AsyncAzureOpenAI,
+ ) -> FileObject:
+ verbose_logger.debug("create_file_data=%s", create_file_data)
+ response = await openai_client.files.create(**create_file_data)
+ verbose_logger.debug("create_file_response=%s", response)
+ return response
+
+ def create_file(
+ self,
+ _is_async: bool,
+ create_file_data: CreateFileRequest,
+ api_base: Optional[str],
+ api_key: Optional[str],
+ api_version: Optional[str],
+ timeout: Union[float, httpx.Timeout],
+ max_retries: Optional[int],
+ client: Optional[Union[AzureOpenAI, AsyncAzureOpenAI]] = None,
+ litellm_params: Optional[dict] = None,
+ ) -> Union[FileObject, Coroutine[Any, Any, FileObject]]:
+
+ openai_client: Optional[Union[AzureOpenAI, AsyncAzureOpenAI]] = (
+ self.get_azure_openai_client(
+ litellm_params=litellm_params or {},
+ api_key=api_key,
+ api_base=api_base,
+ api_version=api_version,
+ client=client,
+ _is_async=_is_async,
+ )
+ )
+ if openai_client is None:
+ raise ValueError(
+ "AzureOpenAI client is not initialized. Make sure api_key is passed or OPENAI_API_KEY is set in the environment."
+ )
+
+ if _is_async is True:
+ if not isinstance(openai_client, AsyncAzureOpenAI):
+ raise ValueError(
+ "AzureOpenAI client is not an instance of AsyncAzureOpenAI. Make sure you passed an AsyncAzureOpenAI client."
+ )
+ return self.acreate_file( # type: ignore
+ create_file_data=create_file_data, openai_client=openai_client
+ )
+ response = openai_client.files.create(**create_file_data)
+ return response
+
+ async def afile_content(
+ self,
+ file_content_request: FileContentRequest,
+ openai_client: AsyncAzureOpenAI,
+ ) -> HttpxBinaryResponseContent:
+ response = await openai_client.files.content(**file_content_request)
+ return HttpxBinaryResponseContent(response=response.response)
+
+ def file_content(
+ self,
+ _is_async: bool,
+ file_content_request: FileContentRequest,
+ api_base: Optional[str],
+ api_key: Optional[str],
+ timeout: Union[float, httpx.Timeout],
+ max_retries: Optional[int],
+ api_version: Optional[str] = None,
+ client: Optional[Union[AzureOpenAI, AsyncAzureOpenAI]] = None,
+ litellm_params: Optional[dict] = None,
+ ) -> Union[
+ HttpxBinaryResponseContent, Coroutine[Any, Any, HttpxBinaryResponseContent]
+ ]:
+ openai_client: Optional[Union[AzureOpenAI, AsyncAzureOpenAI]] = (
+ self.get_azure_openai_client(
+ litellm_params=litellm_params or {},
+ api_key=api_key,
+ api_base=api_base,
+ api_version=api_version,
+ client=client,
+ _is_async=_is_async,
+ )
+ )
+ if openai_client is None:
+ raise ValueError(
+ "AzureOpenAI client is not initialized. Make sure api_key is passed or OPENAI_API_KEY is set in the environment."
+ )
+
+ if _is_async is True:
+ if not isinstance(openai_client, AsyncAzureOpenAI):
+ raise ValueError(
+ "AzureOpenAI client is not an instance of AsyncAzureOpenAI. Make sure you passed an AsyncAzureOpenAI client."
+ )
+ return self.afile_content( # type: ignore
+ file_content_request=file_content_request,
+ openai_client=openai_client,
+ )
+ response = cast(AzureOpenAI, openai_client).files.content(
+ **file_content_request
+ )
+
+ return HttpxBinaryResponseContent(response=response.response)
+
+ async def aretrieve_file(
+ self,
+ file_id: str,
+ openai_client: AsyncAzureOpenAI,
+ ) -> FileObject:
+ response = await openai_client.files.retrieve(file_id=file_id)
+ return response
+
+ def retrieve_file(
+ self,
+ _is_async: bool,
+ file_id: str,
+ api_base: Optional[str],
+ api_key: Optional[str],
+ timeout: Union[float, httpx.Timeout],
+ max_retries: Optional[int],
+ api_version: Optional[str] = None,
+ client: Optional[Union[AzureOpenAI, AsyncAzureOpenAI]] = None,
+ litellm_params: Optional[dict] = None,
+ ):
+ openai_client: Optional[Union[AzureOpenAI, AsyncAzureOpenAI]] = (
+ self.get_azure_openai_client(
+ litellm_params=litellm_params or {},
+ api_key=api_key,
+ api_base=api_base,
+ api_version=api_version,
+ client=client,
+ _is_async=_is_async,
+ )
+ )
+ if openai_client is None:
+ raise ValueError(
+ "AzureOpenAI client is not initialized. Make sure api_key is passed or OPENAI_API_KEY is set in the environment."
+ )
+
+ if _is_async is True:
+ if not isinstance(openai_client, AsyncAzureOpenAI):
+ raise ValueError(
+ "AzureOpenAI client is not an instance of AsyncAzureOpenAI. Make sure you passed an AsyncAzureOpenAI client."
+ )
+ return self.aretrieve_file( # type: ignore
+ file_id=file_id,
+ openai_client=openai_client,
+ )
+ response = openai_client.files.retrieve(file_id=file_id)
+
+ return response
+
+ async def adelete_file(
+ self,
+ file_id: str,
+ openai_client: AsyncAzureOpenAI,
+ ) -> FileDeleted:
+ response = await openai_client.files.delete(file_id=file_id)
+
+ if not isinstance(response, FileDeleted): # azure returns an empty string
+ return FileDeleted(id=file_id, deleted=True, object="file")
+ return response
+
+ def delete_file(
+ self,
+ _is_async: bool,
+ file_id: str,
+ api_base: Optional[str],
+ api_key: Optional[str],
+ timeout: Union[float, httpx.Timeout],
+ max_retries: Optional[int],
+ organization: Optional[str] = None,
+ api_version: Optional[str] = None,
+ client: Optional[Union[AzureOpenAI, AsyncAzureOpenAI]] = None,
+ litellm_params: Optional[dict] = None,
+ ):
+ openai_client: Optional[Union[AzureOpenAI, AsyncAzureOpenAI]] = (
+ self.get_azure_openai_client(
+ litellm_params=litellm_params or {},
+ api_key=api_key,
+ api_base=api_base,
+ api_version=api_version,
+ client=client,
+ _is_async=_is_async,
+ )
+ )
+ if openai_client is None:
+ raise ValueError(
+ "AzureOpenAI client is not initialized. Make sure api_key is passed or OPENAI_API_KEY is set in the environment."
+ )
+
+ if _is_async is True:
+ if not isinstance(openai_client, AsyncAzureOpenAI):
+ raise ValueError(
+ "AzureOpenAI client is not an instance of AsyncAzureOpenAI. Make sure you passed an AsyncAzureOpenAI client."
+ )
+ return self.adelete_file( # type: ignore
+ file_id=file_id,
+ openai_client=openai_client,
+ )
+ response = openai_client.files.delete(file_id=file_id)
+
+ if not isinstance(response, FileDeleted): # azure returns an empty string
+ return FileDeleted(id=file_id, deleted=True, object="file")
+
+ return response
+
+ async def alist_files(
+ self,
+ openai_client: AsyncAzureOpenAI,
+ purpose: Optional[str] = None,
+ ):
+ if isinstance(purpose, str):
+ response = await openai_client.files.list(purpose=purpose)
+ else:
+ response = await openai_client.files.list()
+ return response
+
+ def list_files(
+ self,
+ _is_async: bool,
+ api_base: Optional[str],
+ api_key: Optional[str],
+ timeout: Union[float, httpx.Timeout],
+ max_retries: Optional[int],
+ purpose: Optional[str] = None,
+ api_version: Optional[str] = None,
+ client: Optional[Union[AzureOpenAI, AsyncAzureOpenAI]] = None,
+ litellm_params: Optional[dict] = None,
+ ):
+ openai_client: Optional[Union[AzureOpenAI, AsyncAzureOpenAI]] = (
+ self.get_azure_openai_client(
+ litellm_params=litellm_params or {},
+ api_key=api_key,
+ api_base=api_base,
+ api_version=api_version,
+ client=client,
+ _is_async=_is_async,
+ )
+ )
+ if openai_client is None:
+ raise ValueError(
+ "AzureOpenAI client is not initialized. Make sure api_key is passed or OPENAI_API_KEY is set in the environment."
+ )
+
+ if _is_async is True:
+ if not isinstance(openai_client, AsyncAzureOpenAI):
+ raise ValueError(
+ "AzureOpenAI client is not an instance of AsyncAzureOpenAI. Make sure you passed an AsyncAzureOpenAI client."
+ )
+ return self.alist_files( # type: ignore
+ purpose=purpose,
+ openai_client=openai_client,
+ )
+
+ if isinstance(purpose, str):
+ response = openai_client.files.list(purpose=purpose)
+ else:
+ response = openai_client.files.list()
+
+ return response
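
For reference, here is a minimal usage sketch of the handler introduced by this commit. It is illustrative only: it assumes litellm is importable and that valid Azure OpenAI credentials exist; the endpoint, API key, API version, and file name below are placeholders, not values taken from the diff.

    from litellm.llms.azure.files.handler import AzureOpenAIFilesAPI

    files_api = AzureOpenAIFilesAPI()

    # Synchronous upload of a JSONL payload for batch processing. The handler
    # builds (or reuses) an AzureOpenAI client from the values passed here and
    # forwards the request to openai_client.files.create().
    with open("batch_input.jsonl", "rb") as f:  # placeholder file
        uploaded = files_api.create_file(
            _is_async=False,
            create_file_data={"file": f, "purpose": "batch"},
            api_base="https://my-resource.openai.azure.com",  # placeholder
            api_key="AZURE_API_KEY",                          # placeholder
            api_version="2024-06-01",                         # placeholder
            timeout=600.0,
            max_retries=2,
        )
    print(uploaded.id)

The asynchronous path is the same call with _is_async=True and an AsyncAzureOpenAI-backed client, in which case create_file() returns a coroutine that must be awaited.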