From 4a52a71956a8d46fcb7294ac71734504bb09bcc2 Mon Sep 17 00:00:00 2001
From: S. Solomon Darnell
Date: Fri, 28 Mar 2025 21:52:21 -0500
Subject: two version of R2R are here

---
 .../litellm/llms/ai21/chat/transformation.py | 70 ++++++++++++++++++++++
 1 file changed, 70 insertions(+)
 create mode 100644 .venv/lib/python3.12/site-packages/litellm/llms/ai21/chat/transformation.py

diff --git a/.venv/lib/python3.12/site-packages/litellm/llms/ai21/chat/transformation.py b/.venv/lib/python3.12/site-packages/litellm/llms/ai21/chat/transformation.py
new file mode 100644
index 00000000..1a07b50d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/litellm/llms/ai21/chat/transformation.py
@@ -0,0 +1,70 @@
+"""
+AI21 Chat Completions API
+
+this is OpenAI compatible - no translation needed / occurs
+"""
+
+from typing import Optional, Union
+
+from ...openai_like.chat.transformation import OpenAILikeChatConfig
+
+
+class AI21ChatConfig(OpenAILikeChatConfig):
+    """
+    Reference: https://docs.ai21.com/reference/jamba-15-api-ref#request-parameters
+
+    Below are the parameters:
+    """
+
+    tools: Optional[list] = None
+    response_format: Optional[dict] = None
+    documents: Optional[list] = None
+    max_tokens: Optional[int] = None
+    stop: Optional[Union[str, list]] = None
+    n: Optional[int] = None
+    stream: Optional[bool] = None
+    seed: Optional[int] = None
+    tool_choice: Optional[str] = None
+    user: Optional[str] = None
+
+    def __init__(
+        self,
+        tools: Optional[list] = None,
+        response_format: Optional[dict] = None,
+        max_tokens: Optional[int] = None,
+        temperature: Optional[float] = None,
+        top_p: Optional[float] = None,
+        stop: Optional[Union[str, list]] = None,
+        n: Optional[int] = None,
+        stream: Optional[bool] = None,
+        seed: Optional[int] = None,
+        tool_choice: Optional[str] = None,
+        user: Optional[str] = None,
+    ) -> None:
+        locals_ = locals().copy()
+        for key, value in locals_.items():
+            if key != "self" and value is not None:
+                setattr(self.__class__, key, value)
+
+    @classmethod
+    def get_config(cls):
+        return super().get_config()
+
+    def get_supported_openai_params(self, model: str) -> list:
+        """
+        Get the supported OpenAI params for the given model
+
+        """
+
+        return [
+            "tools",
+            "response_format",
+            "max_tokens",
+            "max_completion_tokens",
+            "temperature",
+            "stop",
+            "n",
+            "stream",
+            "seed",
+            "tool_choice",
+        ]
--
cgit v1.2.3
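
For context on how the class added above is exercised: AI21ChatConfig follows the OpenAI-compatible provider-config pattern, so callers construct it with the parameters they want to pin (non-None constructor arguments are written onto the class via setattr) and call get_supported_openai_params() to see which OpenAI-style fields are forwarded to the AI21 Jamba endpoint. Below is a minimal usage sketch, not part of the patch; the model name "jamba-1.5-large" is illustrative, and only the class and methods defined in the file above are assumed.

    # Minimal sketch: exercise the AI21ChatConfig added in the patch above.
    # Assumes litellm is installed; the model name is illustrative only.
    from litellm.llms.ai21.chat.transformation import AI21ChatConfig

    # Non-None keyword arguments are copied onto the class by __init__'s
    # setattr loop, mirroring litellm's provider-config pattern.
    config = AI21ChatConfig(max_tokens=512, temperature=0.2)

    # Returns the OpenAI-style parameter names passed through to AI21.
    supported = config.get_supported_openai_params(model="jamba-1.5-large")
    print(supported)
    # e.g. ['tools', 'response_format', 'max_tokens', 'max_completion_tokens', ...]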