about summary refs log tree commit diff
path: root/.venv/lib/python3.12/site-packages/litellm/llms/litellm_proxy
diff options
context:
space:
mode:
author	S. Solomon Darnell	2025-03-28 21:52:21 -0500
committer	S. Solomon Darnell	2025-03-28 21:52:21 -0500
commit4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
treeee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/litellm/llms/litellm_proxy
parentcc961e04ba734dd72309fb548a2f97d67d578813 (diff)
downloadgn-ai-master.tar.gz
two version of R2R are here HEAD master
Diffstat (limited to '.venv/lib/python3.12/site-packages/litellm/llms/litellm_proxy')
-rw-r--r--.venv/lib/python3.12/site-packages/litellm/llms/litellm_proxy/chat/transformation.py33
1 file changed, 33 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/litellm/llms/litellm_proxy/chat/transformation.py b/.venv/lib/python3.12/site-packages/litellm/llms/litellm_proxy/chat/transformation.py
new file mode 100644
index 00000000..dadd921a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/litellm/llms/litellm_proxy/chat/transformation.py
@@ -0,0 +1,33 @@
+"""
+Translate from OpenAI's `/v1/chat/completions` to the LiteLLM Proxy's `/v1/chat/completions`
+"""
+
+from typing import List, Optional, Tuple
+
+from litellm.secret_managers.main import get_secret_str
+
+from ...openai.chat.gpt_transformation import OpenAIGPTConfig
+
+
class LiteLLMProxyChatConfig(OpenAIGPTConfig):
    """Chat-completions config for routing requests through a LiteLLM proxy.

    Behaves like the OpenAI-compatible base config, but resolves its base URL
    and API key from the ``LITELLM_PROXY_API_BASE`` / ``LITELLM_PROXY_API_KEY``
    secrets when they are not passed in explicitly.
    """

    def _get_openai_compatible_provider_info(
        self, api_base: Optional[str], api_key: Optional[str]
    ) -> Tuple[Optional[str], Optional[str]]:
        """Return ``(api_base, api_key)``, filling any missing value from env secrets."""
        resolved_base = api_base or get_secret_str("LITELLM_PROXY_API_BASE")  # type: ignore
        resolved_key = api_key or get_secret_str("LITELLM_PROXY_API_KEY")
        return resolved_base, resolved_key

    def get_models(
        self, api_key: Optional[str] = None, api_base: Optional[str] = None
    ) -> List[str]:
        """List the proxy's models, each name prefixed with ``litellm_proxy/``.

        Raises:
            ValueError: if no api_base is given and the env secret is unset.
        """
        api_base, api_key = self._get_openai_compatible_provider_info(api_base, api_key)
        if api_base is None:
            raise ValueError(
                "api_base not set for LiteLLM Proxy route. Set in env via `LITELLM_PROXY_API_BASE`"
            )
        prefixed: List[str] = []
        for model in super().get_models(api_key=api_key, api_base=api_base):
            prefixed.append(f"litellm_proxy/{model}")
        return prefixed

    @staticmethod
    def get_api_key(api_key: Optional[str] = None) -> Optional[str]:
        """Return the explicit key if truthy, else the LITELLM_PROXY_API_KEY secret."""
        if api_key:
            return api_key
        return get_secret_str("LITELLM_PROXY_API_KEY")