path: root/.venv/lib/python3.12/site-packages/litellm/llms/lm_studio/chat/transformation.py
Diffstat (limited to '.venv/lib/python3.12/site-packages/litellm/llms/lm_studio/chat/transformation.py')
-rw-r--r--  .venv/lib/python3.12/site-packages/litellm/llms/lm_studio/chat/transformation.py  20
1 file changed, 20 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/litellm/llms/lm_studio/chat/transformation.py b/.venv/lib/python3.12/site-packages/litellm/llms/lm_studio/chat/transformation.py
new file mode 100644
index 00000000..147e8e92
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/litellm/llms/lm_studio/chat/transformation.py
@@ -0,0 +1,20 @@
+"""
+Translate from OpenAI's `/v1/chat/completions` to LM Studio's `/chat/completions`
+"""
+
+from typing import Optional, Tuple
+
+from litellm.secret_managers.main import get_secret_str
+
+from ...openai.chat.gpt_transformation import OpenAIGPTConfig
+
+
+class LMStudioChatConfig(OpenAIGPTConfig):
+    def _get_openai_compatible_provider_info(
+        self, api_base: Optional[str], api_key: Optional[str]
+    ) -> Tuple[Optional[str], Optional[str]]:
+        api_base = api_base or get_secret_str("LM_STUDIO_API_BASE")  # type: ignore
+        dynamic_api_key = (
+            api_key or get_secret_str("LM_STUDIO_API_KEY") or " "
+        )  # LM Studio does not require an api key
+        return api_base, dynamic_api_key
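
For context, a minimal usage sketch of the provider-info helper added above. It assumes a locally running LM Studio server on its default port (1234) and that litellm's get_secret_str falls back to environment variables, which is its default behaviour; the snippet is illustrative and not part of this diff.

import os

from litellm.llms.lm_studio.chat.transformation import LMStudioChatConfig

# Point litellm at a local LM Studio server via the env var read by get_secret_str.
os.environ["LM_STUDIO_API_BASE"] = "http://localhost:1234/v1"

config = LMStudioChatConfig()

# Explicit arguments take precedence; otherwise the LM_STUDIO_* env vars are
# consulted, and the key falls back to a single space since LM Studio does not
# require one.
api_base, dynamic_api_key = config._get_openai_compatible_provider_info(
    api_base=None, api_key=None
)
print(api_base)         # http://localhost:1234/v1
print(dynamic_api_key)  # " "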