diff options
Diffstat (limited to '.venv/lib/python3.12/site-packages/litellm/proxy/common_utils/openai_endpoint_utils.py')
-rw-r--r-- | .venv/lib/python3.12/site-packages/litellm/proxy/common_utils/openai_endpoint_utils.py | 39 |
1 file changed, 39 insertions, 0 deletions
"""
Contains utils used by OpenAI compatible endpoints
"""

from typing import TYPE_CHECKING, Optional

if TYPE_CHECKING:
    # Only needed for type annotations; guarded so importing this module
    # does not require FastAPI to be installed.
    from fastapi import Request

# Keys inside `litellm_params` whose values are credentials and must never
# be echoed back to API callers.
_SENSITIVE_LITELLM_PARAM_KEYS = (
    "api_key",
    "vertex_credentials",
    "aws_access_key_id",
    "aws_secret_access_key",
)


def remove_sensitive_info_from_deployment(deployment_dict: dict) -> dict:
    """
    Removes sensitive information from a deployment dictionary.

    Mutates `deployment_dict` in place and also returns it.

    Args:
        deployment_dict (dict): The deployment dictionary to remove sensitive
            information from.

    Returns:
        dict: The modified deployment dictionary with sensitive information removed.
    """
    # Fix: the original indexed `deployment_dict["litellm_params"]` directly,
    # which raised KeyError for deployments without a `litellm_params` entry.
    litellm_params = deployment_dict.get("litellm_params")
    if isinstance(litellm_params, dict):
        for sensitive_key in _SENSITIVE_LITELLM_PARAM_KEYS:
            litellm_params.pop(sensitive_key, None)

    return deployment_dict


async def get_custom_llm_provider_from_request_body(
    request: "Request",
) -> Optional[str]:
    """
    Get the `custom_llm_provider` from the request body

    Safely reads the request body

    Args:
        request (Request): The incoming FastAPI request.

    Returns:
        Optional[str]: The provider name if present in the body, else None.
    """
    # Imported here (not at module top) so importing this module does not
    # pull in the full proxy dependency chain.
    from litellm.proxy.common_utils.http_parsing_utils import _read_request_body

    request_body: dict = await _read_request_body(request=request) or {}
    # `dict.get` replaces the membership-test-then-index double lookup;
    # it returns None when the key is absent, matching the original behavior.
    return request_body.get("custom_llm_provider")