commit    4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch) (HEAD, master)
author    S. Solomon Darnell  2025-03-28 21:52:21 -0500
committer S. Solomon Darnell  2025-03-28 21:52:21 -0500
tree      ee3dc5af3b6313e921cd920906356f5d4febc4ed  /.venv/lib/python3.12/site-packages/litellm/assistants/utils.py
parent    cc961e04ba734dd72309fb548a2f97d67d578813 (diff)
    two versions of R2R are here
Diffstat (limited to '.venv/lib/python3.12/site-packages/litellm/assistants/utils.py')
 -rw-r--r--  .venv/lib/python3.12/site-packages/litellm/assistants/utils.py | 178
 1 file changed, 178 insertions(+), 0 deletions(-)
diff --git a/.venv/lib/python3.12/site-packages/litellm/assistants/utils.py b/.venv/lib/python3.12/site-packages/litellm/assistants/utils.py
new file mode 100644
index 00000000..f8fc6ee0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/litellm/assistants/utils.py
@@ -0,0 +1,178 @@
+from typing import List, Optional, Union
+
+import litellm
+
+from ..exceptions import UnsupportedParamsError
+from ..types.llms.openai import *  # provides Attachment and the MessageContent* types used below
+
+
+def get_optional_params_add_message(
+    role: Optional[str],
+    content: Optional[
+        Union[
+            str,
+            List[
+                Union[
+                    MessageContentTextObject,
+                    MessageContentImageFileObject,
+                    MessageContentImageURLObject,
+                ]
+            ],
+        ]
+    ],
+    attachments: Optional[List[Attachment]],
+    metadata: Optional[dict],
+    custom_llm_provider: str,
+    **kwargs,
+):
+    """
+    Azure doesn't support 'attachments' for creating a message
+
+    Reference - https://learn.microsoft.com/en-us/azure/ai-services/openai/assistants-reference-messages?tabs=python#create-message
+    """
+    passed_params = locals()
+    custom_llm_provider = passed_params.pop("custom_llm_provider")
+    special_params = passed_params.pop("kwargs")
+    for k, v in special_params.items():
+        passed_params[k] = v
+
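+    # Params OpenAI's create-message API accepts, with their defaults; anything
+    # still at its default is dropped from the outgoing request.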
+    default_params = {
+        "role": None,
+        "content": None,
+        "attachments": None,
+        "metadata": None,
+    }
+
+    non_default_params = {
+        k: v
+        for k, v in passed_params.items()
+        if (k in default_params and v != default_params[k])
+    }
+    optional_params = {}
+
+    ## raise an exception (or drop the param, if litellm.drop_params is set) when a non-default value isn't supported by the provider
+    def _check_valid_arg(supported_params):
+        if len(non_default_params.keys()) > 0:
+            keys = list(non_default_params.keys())
+            for k in keys:
+                if (
+                    litellm.drop_params is True and k not in supported_params
+                ):  # drop the unsupported non-default values
+                    non_default_params.pop(k, None)
+                elif k not in supported_params:
+                    raise UnsupportedParamsError(
+                        status_code=500,
+                        message="{} is not supported by {}. Supported params={}. To drop it from the call, set `litellm.drop_params = True`.".format(
+                            k, custom_llm_provider, supported_params
+                        ),
+                    )
+            return non_default_params
+
+    if custom_llm_provider == "openai":
+        optional_params = non_default_params
+    elif custom_llm_provider == "azure":
+        supported_params = (
+            litellm.AzureOpenAIAssistantsAPIConfig().get_supported_openai_create_message_params()
+        )
+        _check_valid_arg(supported_params=supported_params)
+        optional_params = litellm.AzureOpenAIAssistantsAPIConfig().map_openai_params_create_message_params(
+            non_default_params=non_default_params, optional_params=optional_params
+        )
+    for k in passed_params.keys():
+        if k not in default_params.keys():
+            optional_params[k] = passed_params[k]
+    return optional_params
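+
+# Example (a minimal sketch; the argument values below are made up):
+#   get_optional_params_add_message(
+#       role="user", content="hi", attachments=None, metadata=None,
+#       custom_llm_provider="openai",
+#   )
+#   -> {"role": "user", "content": "hi"}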
+
+
+def get_optional_params_image_gen(
+    n: Optional[int] = None,
+    quality: Optional[str] = None,
+    response_format: Optional[str] = None,
+    size: Optional[str] = None,
+    style: Optional[str] = None,
+    user: Optional[str] = None,
+    custom_llm_provider: Optional[str] = None,
+    **kwargs,
+):
+    # retrieve all parameters passed to the function
+    passed_params = locals()
+    custom_llm_provider = passed_params.pop("custom_llm_provider")
+    special_params = passed_params.pop("kwargs")
+    for k, v in special_params.items():
+        passed_params[k] = v
+
+    default_params = {
+        "n": None,
+        "quality": None,
+        "response_format": None,
+        "size": None,
+        "style": None,
+        "user": None,
+    }
+
+    non_default_params = {
+        k: v
+        for k, v in passed_params.items()
+        if (k in default_params and v != default_params[k])
+    }
+    optional_params = {}
+
+    ## raise an exception (or drop the param, if litellm.drop_params is set) when a non-default value isn't supported by the provider
+    def _check_valid_arg(supported_params):
+        if len(non_default_params.keys()) > 0:
+            keys = list(non_default_params.keys())
+            for k in keys:
+                if (
+                    litellm.drop_params is True and k not in supported_params
+                ):  # drop the unsupported non-default values
+                    non_default_params.pop(k, None)
+                elif k not in supported_params:
+                    raise UnsupportedParamsError(
+                        status_code=500,
+                        message=f"Setting `{k}` is not supported by {custom_llm_provider}. To drop it from the call, set `litellm.drop_params = True`.",
+                    )
+            return non_default_params
+
+    if (
+        custom_llm_provider == "openai"
+        or custom_llm_provider == "azure"
+        or custom_llm_provider in litellm.openai_compatible_providers
+    ):
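+        # These providers accept OpenAI's image-generation params unchanged.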
+        optional_params = non_default_params
+    elif custom_llm_provider == "bedrock":
+        supported_params = ["size"]
+        _check_valid_arg(supported_params=supported_params)
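+        # Bedrock expects separate integer width/height rather than OpenAI's "WxH" size string.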
+        if size is not None:
+            width, height = size.split("x")
+            optional_params["width"] = int(width)
+            optional_params["height"] = int(height)
+    elif custom_llm_provider == "vertex_ai":
+        supported_params = ["n"]
+        """
+        All params here: https://console.cloud.google.com/vertex-ai/publishers/google/model-garden/imagegeneration?project=adroit-crow-413218
+        """
+        _check_valid_arg(supported_params=supported_params)
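+        # Imagen names the image-count param "sampleCount".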
+        if n is not None:
+            optional_params["sampleCount"] = int(n)
+
+    for k in passed_params.keys():
+        if k not in default_params.keys():
+            optional_params[k] = passed_params[k]
+    return optional_params
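+
+# Example (a minimal sketch; the size value below is made up):
+#   get_optional_params_image_gen(size="1024x1024", custom_llm_provider="bedrock")
+#   -> {"width": 1024, "height": 1024}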