Diffstat (limited to '.venv/lib/python3.12/site-packages/litellm/llms/databricks/embed')
-rw-r--r--  .venv/lib/python3.12/site-packages/litellm/llms/databricks/embed/handler.py         | 49
-rw-r--r--  .venv/lib/python3.12/site-packages/litellm/llms/databricks/embed/transformation.py  | 48
2 files changed, 97 insertions(+), 0 deletions(-)
diff --git a/.venv/lib/python3.12/site-packages/litellm/llms/databricks/embed/handler.py b/.venv/lib/python3.12/site-packages/litellm/llms/databricks/embed/handler.py
new file mode 100644
index 00000000..2eabcdbc
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/litellm/llms/databricks/embed/handler.py
@@ -0,0 +1,49 @@
+"""
+Calling logic for Databricks embeddings
+"""
+
+from typing import Optional
+
+from litellm.utils import EmbeddingResponse
+
+from ...openai_like.embedding.handler import OpenAILikeEmbeddingHandler
+from ..common_utils import DatabricksBase
+
+
+class DatabricksEmbeddingHandler(OpenAILikeEmbeddingHandler, DatabricksBase):
+ def embedding(
+ self,
+ model: str,
+ input: list,
+ timeout: float,
+ logging_obj,
+ api_key: Optional[str],
+ api_base: Optional[str],
+ optional_params: dict,
+ model_response: Optional[EmbeddingResponse] = None,
+ client=None,
+ aembedding=None,
+ custom_endpoint: Optional[bool] = None,
+ headers: Optional[dict] = None,
+ ) -> EmbeddingResponse:
+ api_base, headers = self.databricks_validate_environment(
+ api_base=api_base,
+ api_key=api_key,
+ endpoint_type="embeddings",
+ custom_endpoint=custom_endpoint,
+ headers=headers,
+ )
+ return super().embedding(
+ model=model,
+ input=input,
+ timeout=timeout,
+ logging_obj=logging_obj,
+ api_key=api_key,
+ api_base=api_base,
+ optional_params=optional_params,
+ model_response=model_response,
+ client=client,
+ aembedding=aembedding,
+ custom_endpoint=True,
+ headers=headers,
+ )
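The handler above only validates the Databricks credentials and endpoint, then defers to the shared OpenAI-like embedding handler with custom_endpoint=True. A minimal usage sketch follows, assuming the handler is reached through litellm's top-level embedding() entry point and that the DATABRICKS_API_KEY / DATABRICKS_API_BASE environment variables are the credentials it picks up (both are assumptions about the surrounding library, not shown in this diff):

    # Sketch: routing an embedding call to DatabricksEmbeddingHandler via litellm.
    # DATABRICKS_API_KEY / DATABRICKS_API_BASE are assumed credential variables.
    import os
    import litellm

    os.environ["DATABRICKS_API_KEY"] = "dapi-..."  # hypothetical token
    os.environ["DATABRICKS_API_BASE"] = "https://example.cloud.databricks.com/serving-endpoints"  # hypothetical workspace URL

    response = litellm.embedding(
        model="databricks/databricks-bge-large-en",  # "databricks/" prefix selects this provider
        input=["Represent this sentence for searching relevant passages: what is a Delta table?"],
    )
    print(len(response.data[0]["embedding"]))  # length of the returned embedding vector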
diff --git a/.venv/lib/python3.12/site-packages/litellm/llms/databricks/embed/transformation.py b/.venv/lib/python3.12/site-packages/litellm/llms/databricks/embed/transformation.py
new file mode 100644
index 00000000..53e3b30d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/litellm/llms/databricks/embed/transformation.py
@@ -0,0 +1,48 @@
+"""
+Translates from OpenAI's `/v1/embeddings` to Databricks' `/embeddings`
+"""
+
+import types
+from typing import Optional
+
+
+class DatabricksEmbeddingConfig:
+ """
+ Reference: https://learn.microsoft.com/en-us/azure/databricks/machine-learning/foundation-models/api-reference#--embedding-task
+ """
+
+ instruction: Optional[str] = (
+ None # An optional instruction to pass to the embedding model. BGE Authors recommend 'Represent this sentence for searching relevant passages:' for retrieval queries
+ )
+
+ def __init__(self, instruction: Optional[str] = None) -> None:
+ locals_ = locals().copy()
+ for key, value in locals_.items():
+ if key != "self" and value is not None:
+ setattr(self.__class__, key, value)
+
+ @classmethod
+ def get_config(cls):
+ return {
+ k: v
+ for k, v in cls.__dict__.items()
+ if not k.startswith("__")
+ and not isinstance(
+ v,
+ (
+ types.FunctionType,
+ types.BuiltinFunctionType,
+ classmethod,
+ staticmethod,
+ ),
+ )
+ and v is not None
+ }
+
+ def get_supported_openai_params(
+ self,
+ ): # no optional openai embedding params supported
+ return []
+
+ def map_openai_params(self, non_default_params: dict, optional_params: dict):
+ return optional_params
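The config class stores its only provider-specific parameter, instruction, on the class itself, so get_config() reports any value passed to the constructor, while get_supported_openai_params() and map_openai_params() declare that no optional OpenAI embedding parameters are translated. A small sketch of that behaviour, using the import path implied by the file location above:

    # Sketch of DatabricksEmbeddingConfig behaviour, based only on the code in this diff.
    from litellm.llms.databricks.embed.transformation import DatabricksEmbeddingConfig

    # Passing `instruction` stores it on the class, so get_config() exposes it.
    DatabricksEmbeddingConfig(
        instruction="Represent this sentence for searching relevant passages:"
    )
    print(DatabricksEmbeddingConfig.get_config())
    # {'instruction': 'Represent this sentence for searching relevant passages:'}

    # No optional OpenAI embedding params are mapped; optional_params pass through unchanged.
    cfg = DatabricksEmbeddingConfig()
    print(cfg.get_supported_openai_params())               # []
    print(cfg.map_openai_params({"dimensions": 256}, {}))  # {}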