Diffstat (limited to '.venv/lib/python3.12/site-packages/litellm/llms/maritalk.py')
-rw-r--r--  .venv/lib/python3.12/site-packages/litellm/llms/maritalk.py  66
1 file changed, 66 insertions(+), 0 deletions(-)
diff --git a/.venv/lib/python3.12/site-packages/litellm/llms/maritalk.py b/.venv/lib/python3.12/site-packages/litellm/llms/maritalk.py
new file mode 100644
index 00000000..5f2b8d71
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/litellm/llms/maritalk.py
@@ -0,0 +1,66 @@
+from typing import List, Optional, Union
+
+from httpx._models import Headers
+
+from litellm.llms.base_llm.chat.transformation import BaseLLMException
+from litellm.llms.openai.chat.gpt_transformation import OpenAIGPTConfig
+
+
+class MaritalkError(BaseLLMException):
+    def __init__(
+        self,
+        status_code: int,
+        message: str,
+        headers: Optional[Union[dict, Headers]] = None,
+    ):
+        super().__init__(status_code=status_code, message=message, headers=headers)
+
+
+class MaritalkConfig(OpenAIGPTConfig):
+
+    def __init__(
+        self,
+        frequency_penalty: Optional[float] = None,
+        presence_penalty: Optional[float] = None,
+        top_p: Optional[float] = None,
+        top_k: Optional[int] = None,
+        temperature: Optional[float] = None,
+        max_tokens: Optional[int] = None,
+        n: Optional[int] = None,
+        stop: Optional[List[str]] = None,
+        stream: Optional[bool] = None,
+        stream_options: Optional[dict] = None,
+        tools: Optional[List[dict]] = None,
+        tool_choice: Optional[Union[str, dict]] = None,
+    ) -> None:
+        locals_ = locals().copy()  # snapshot the constructor arguments
+        for key, value in locals_.items():
+            if key != "self" and value is not None:
+                setattr(self.__class__, key, value)  # stored on the class so get_config() can read overrides
+
+    @classmethod
+    def get_config(cls):
+        return super().get_config()
+
+    def get_supported_openai_params(self, model: str) -> List[str]:
+        return [
+            "frequency_penalty",
+            "presence_penalty",
+            "top_p",
+            "top_k",
+            "temperature",
+            "max_tokens",
+            "n",
+            "stop",
+            "stream",
+            "stream_options",
+            "tools",
+            "tool_choice",
+        ]
+
+    def get_error_class(
+        self, error_message: str, status_code: int, headers: Union[dict, Headers]
+    ) -> BaseLLMException:
+        return MaritalkError(
+            status_code=status_code, message=error_message, headers=headers
+        )
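
Below is a minimal usage sketch of the classes this diff adds, assuming they are importable from the vendored litellm package; the model name "sabia-3" is an illustrative assumption, not something the diff specifies.

from litellm.llms.maritalk import MaritalkConfig, MaritalkError

config = MaritalkConfig(temperature=0.7, max_tokens=256)

# get_supported_openai_params returns a static list; the model argument
# is accepted but not consulted.
params = config.get_supported_openai_params(model="sabia-3")
assert "tool_choice" in params

# get_error_class wraps an upstream failure in a MaritalkError.
err = config.get_error_class(
    error_message="rate limited", status_code=429, headers={}
)
assert isinstance(err, MaritalkError)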