Diffstat (limited to '.venv/lib/python3.12/site-packages/litellm/llms/lm_studio/embed/transformation.py')
-rw-r--r--  .venv/lib/python3.12/site-packages/litellm/llms/lm_studio/embed/transformation.py  50
1 file changed, 50 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/litellm/llms/lm_studio/embed/transformation.py b/.venv/lib/python3.12/site-packages/litellm/llms/lm_studio/embed/transformation.py
new file mode 100644
index 00000000..1285550c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/litellm/llms/lm_studio/embed/transformation.py
@@ -0,0 +1,50 @@
+"""
+Transformation logic from OpenAI /v1/embeddings format to LM Studio's `/v1/embeddings` format.
+
+Why a separate file? To make it easy to see how the transformation works.
+
+Docs - https://lmstudio.ai/docs/basics/server
+"""
+
+import types
+from typing import List
+
+
+class LmStudioEmbeddingConfig:
+ """
+ Reference: https://lmstudio.ai/docs/basics/server
+ """
+
+ def __init__(
+ self,
+ ) -> None:
+ locals_ = locals().copy()
+ for key, value in locals_.items():
+ if key != "self" and value is not None:
+ setattr(self.__class__, key, value)
+
+ @classmethod
+ def get_config(cls):
+ return {
+ k: v
+ for k, v in cls.__dict__.items()
+ if not k.startswith("__")
+ and not isinstance(
+ v,
+ (
+ types.FunctionType,
+ types.BuiltinFunctionType,
+ classmethod,
+ staticmethod,
+ ),
+ )
+ and v is not None
+ }
+
+ def get_supported_openai_params(self) -> List[str]:
+ return []
+
+ def map_openai_params(
+ self, non_default_params: dict, optional_params: dict
+ ) -> dict:
+ return optional_params
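
For reference, a minimal usage sketch of the class added above, run in isolation. The import path matches the file in this diff; the model name, input text, and local server URL (http://localhost:1234) are illustrative assumptions about a typical LM Studio setup, not values taken from litellm itself.

# Minimal sketch, assuming litellm is installed so the module path from this diff resolves.
from litellm.llms.lm_studio.embed.transformation import LmStudioEmbeddingConfig

config = LmStudioEmbeddingConfig()

# No OpenAI embedding params are mapped for LM Studio; optional_params pass through unchanged.
print(config.get_supported_openai_params())            # []
print(config.map_openai_params({"user": "abc"}, {}))   # {}

# LM Studio exposes an OpenAI-compatible /v1/embeddings endpoint, so the request body
# needs no translation beyond the OpenAI format itself. Example payload (values assumed):
payload = {
    "model": "nomic-embed-text-v1.5",   # hypothetical example model name
    "input": ["hello world"],
}
# This payload could be POSTed to http://localhost:1234/v1/embeddings,
# LM Studio's default local server address (see https://lmstudio.ai/docs/basics/server).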