diff options
author | S. Solomon Darnell | 2025-03-28 21:52:21 -0500 |
---|---|---|
committer | S. Solomon Darnell | 2025-03-28 21:52:21 -0500 |
commit | 4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch) | |
tree | ee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/litellm/llms/groq/stt/transformation.py | |
parent | cc961e04ba734dd72309fb548a2f97d67d578813 (diff) | |
download | gn-ai-master.tar.gz |
Diffstat (limited to '.venv/lib/python3.12/site-packages/litellm/llms/groq/stt/transformation.py')
-rw-r--r-- | .venv/lib/python3.12/site-packages/litellm/llms/groq/stt/transformation.py | 101 |
1 file changed, 101 insertions, 0 deletions
"""
Translate from OpenAI's `/v1/audio/transcriptions` to Groq's `/v1/audio/transcriptions`
"""

import types
from typing import List, Optional, Union


class GroqSTTConfig:
    """Provider config mapping OpenAI speech-to-text params onto Groq's API.

    NOTE: like the other litellm provider configs, ``__init__`` writes any
    non-None constructor argument back onto the *class* (via
    ``setattr(self.__class__, ...)``), not the instance. Configuration is
    therefore shared process-wide, and ``get_config`` (a classmethod reading
    ``cls.__dict__``) deliberately relies on that.
    """

    # Class-level defaults. Sampling params (temperature, top_p, penalties)
    # are float-valued per the OpenAI API, not int.
    frequency_penalty: Optional[float] = None
    function_call: Optional[Union[str, dict]] = None
    functions: Optional[list] = None
    logit_bias: Optional[dict] = None
    max_tokens: Optional[int] = None
    n: Optional[int] = None
    presence_penalty: Optional[float] = None
    stop: Optional[Union[str, list]] = None
    temperature: Optional[float] = None
    top_p: Optional[float] = None
    response_format: Optional[dict] = None
    tools: Optional[list] = None
    tool_choice: Optional[Union[str, dict]] = None

    def __init__(
        self,
        frequency_penalty: Optional[float] = None,
        function_call: Optional[Union[str, dict]] = None,
        functions: Optional[list] = None,
        logit_bias: Optional[dict] = None,
        max_tokens: Optional[int] = None,
        n: Optional[int] = None,
        presence_penalty: Optional[float] = None,
        stop: Optional[Union[str, list]] = None,
        temperature: Optional[float] = None,
        top_p: Optional[float] = None,
        response_format: Optional[dict] = None,
        tools: Optional[list] = None,
        tool_choice: Optional[Union[str, dict]] = None,
    ) -> None:
        locals_ = locals().copy()
        for key, value in locals_.items():
            if key != "self" and value is not None:
                # Intentionally set on the class (not the instance) so the
                # classmethod get_config() can see the values via cls.__dict__.
                setattr(self.__class__, key, value)

    @classmethod
    def get_config(cls) -> dict:
        """Return every explicitly-set (non-None), non-callable, non-dunder
        attribute from the class dict."""
        return {
            k: v
            for k, v in cls.__dict__.items()
            if not k.startswith("__")
            and not isinstance(
                v,
                (
                    types.FunctionType,
                    types.BuiltinFunctionType,
                    classmethod,
                    staticmethod,
                ),
            )
            and v is not None
        }

    def get_supported_openai_params_stt(self) -> List[str]:
        """OpenAI transcription params that Groq accepts unchanged."""
        return [
            "prompt",
            "response_format",
            "temperature",
            "language",
        ]

    def get_supported_openai_response_formats_stt(self) -> List[str]:
        """Transcription `response_format` values Groq supports
        (OpenAI's `srt`/`vtt` are not in the list)."""
        return ["json", "verbose_json", "text"]

    def map_openai_params_stt(
        self,
        non_default_params: dict,
        optional_params: dict,
        model: str,
        drop_params: bool,
    ) -> dict:
        """Copy supported OpenAI params into ``optional_params`` for Groq.

        Args:
            non_default_params: caller-supplied OpenAI-style params.
            optional_params: dict mutated in place with the mapped params.
            model: model name (unused here; kept for the provider-config
                interface shared with other configs).
            drop_params: when True (or when the global ``litellm.drop_params``
                is True), silently drop an unsupported ``response_format``.

        Returns:
            The (mutated) ``optional_params`` dict.

        Raises:
            litellm.utils.UnsupportedParamsError: for an unsupported
                ``response_format`` when dropping is not enabled.
        """
        response_formats = self.get_supported_openai_response_formats_stt()
        for param, value in non_default_params.items():
            if param == "response_format":
                if value in response_formats:
                    optional_params[param] = value
                else:
                    # Imported lazily: litellm is only needed on this error
                    # branch, so the happy path carries no import dependency.
                    import litellm

                    if litellm.drop_params is True or drop_params is True:
                        pass
                    else:
                        raise litellm.utils.UnsupportedParamsError(
                            message="Groq doesn't support response_format={}. To drop unsupported openai params from the call, set `litellm.drop_params = True`".format(
                                value
                            ),
                            status_code=400,
                        )
            else:
                optional_params[param] = value
        return optional_params