about summary refs log tree commit diff
path: root/.venv/lib/python3.12/site-packages/litellm/llms/openrouter
diff options
context:
space:
mode:
authorS. Solomon Darnell2025-03-28 21:52:21 -0500
committerS. Solomon Darnell2025-03-28 21:52:21 -0500
commit4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
treeee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/litellm/llms/openrouter
parentcc961e04ba734dd72309fb548a2f97d67d578813 (diff)
downloadgn-ai-master.tar.gz
two version of R2R are here HEAD master
Diffstat (limited to '.venv/lib/python3.12/site-packages/litellm/llms/openrouter')
-rw-r--r--.venv/lib/python3.12/site-packages/litellm/llms/openrouter/chat/transformation.py88
-rw-r--r--.venv/lib/python3.12/site-packages/litellm/llms/openrouter/common_utils.py5
2 files changed, 93 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/litellm/llms/openrouter/chat/transformation.py b/.venv/lib/python3.12/site-packages/litellm/llms/openrouter/chat/transformation.py
new file mode 100644
index 00000000..4b95ec87
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/litellm/llms/openrouter/chat/transformation.py
@@ -0,0 +1,88 @@
+"""
+Support for OpenAI's `/v1/chat/completions` endpoint. 
+
+Calls done in OpenAI/openai.py as OpenRouter is openai-compatible.
+
+Docs: https://openrouter.ai/docs/parameters
+"""
+
+from typing import Any, AsyncIterator, Iterator, Optional, Union
+
+import httpx
+
+from litellm.llms.base_llm.base_model_iterator import BaseModelResponseIterator
+from litellm.llms.base_llm.chat.transformation import BaseLLMException
+from litellm.types.utils import ModelResponse, ModelResponseStream
+
+from ...openai.chat.gpt_transformation import OpenAIGPTConfig
+from ..common_utils import OpenRouterException
+
+
class OpenrouterConfig(OpenAIGPTConfig):
    """Configuration for OpenRouter's OpenAI-compatible `/chat/completions` API.

    OpenRouter accepts the standard OpenAI parameter set plus a few
    provider-specific ones (`transforms`, `models`, `route`), which must be
    sent through the OpenAI client's `extra_body` passthrough.
    """

    def map_openai_params(
        self,
        non_default_params: dict,
        optional_params: dict,
        model: str,
        drop_params: bool,
    ) -> dict:
        """Map OpenAI params, then tuck OpenRouter-only params into `extra_body`.

        NOTE: `transforms`, `models`, and `route` are popped (removed) from
        `non_default_params` so they are not forwarded as top-level kwargs.
        `extra_body` is always attached, even when empty.
        """
        mapped = super().map_openai_params(
            non_default_params, optional_params, model, drop_params
        )

        # Collect OpenRouter-only parameters; the openai client forwards
        # `extra_body` verbatim in the request payload.
        passthrough: dict = {}
        for key in ("transforms", "models", "route"):
            value = non_default_params.pop(key, None)
            if value is not None:
                passthrough[key] = value

        mapped["extra_body"] = passthrough
        return mapped

    def get_error_class(
        self, error_message: str, status_code: int, headers: Union[dict, httpx.Headers]
    ) -> BaseLLMException:
        """Wrap a provider error response in the OpenRouter-specific exception."""
        return OpenRouterException(
            message=error_message,
            status_code=status_code,
            headers=headers,
        )

    def get_model_response_iterator(
        self,
        streaming_response: Union[Iterator[str], AsyncIterator[str], ModelResponse],
        sync_stream: bool,
        json_mode: Optional[bool] = False,
    ) -> Any:
        """Return the OpenRouter-aware iterator over streaming response chunks."""
        return OpenRouterChatCompletionStreamingHandler(
            streaming_response=streaming_response,
            sync_stream=sync_stream,
            json_mode=json_mode,
        )
+
+
class OpenRouterChatCompletionStreamingHandler(BaseModelResponseIterator):
    """Parses OpenRouter streaming chunks, exposing OpenRouter's `reasoning`
    delta field under litellm's standard `reasoning_content` key."""

    def chunk_parser(self, chunk: dict) -> ModelResponseStream:
        """Convert one raw OpenRouter stream chunk into a ModelResponseStream.

        Args:
            chunk: Decoded JSON chunk dict with "id", "created", "model",
                and "choices" keys; each choice must contain a "delta" dict.

        Returns:
            ModelResponseStream with each choice's `delta.reasoning` copied
            to `delta.reasoning_content` (None when absent).

        Raises:
            KeyError: if a required field ("choices", "id", "created",
                "model", or a choice's "delta") is missing from `chunk`.
        """
        # Fix: removed the dead `try: ... except Exception as e: raise e`
        # wrapper — it added no handling and obscured the natural re-raise.
        new_choices = []
        for choice in chunk["choices"]:
            # Mirror OpenRouter's `reasoning` field onto the standard key.
            # NOTE: mutates the incoming choice dict in place, as before.
            choice["delta"]["reasoning_content"] = choice["delta"].get("reasoning")
            new_choices.append(choice)
        return ModelResponseStream(
            id=chunk["id"],
            object="chat.completion.chunk",
            created=chunk["created"],
            model=chunk["model"],
            choices=new_choices,
        )
diff --git a/.venv/lib/python3.12/site-packages/litellm/llms/openrouter/common_utils.py b/.venv/lib/python3.12/site-packages/litellm/llms/openrouter/common_utils.py
new file mode 100644
index 00000000..96e53a5a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/litellm/llms/openrouter/common_utils.py
@@ -0,0 +1,5 @@
+from litellm.llms.base_llm.chat.transformation import BaseLLMException
+
+
class OpenRouterException(BaseLLMException):
    """Exception raised for errors returned by the OpenRouter API.

    Thin marker subclass of BaseLLMException so callers can catch
    OpenRouter-specific failures distinctly; adds no behavior of its own.
    """

    pass