path: root/.venv/lib/python3.12/site-packages/litellm/llms/groq/stt/transformation.py
author     S. Solomon Darnell  2025-03-28 21:52:21 -0500
committer  S. Solomon Darnell  2025-03-28 21:52:21 -0500
commit     4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
tree       ee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/litellm/llms/groq/stt/transformation.py
parent     cc961e04ba734dd72309fb548a2f97d67d578813 (diff)
two versions of R2R are here (HEAD, master)
Diffstat (limited to '.venv/lib/python3.12/site-packages/litellm/llms/groq/stt/transformation.py')
-rw-r--r--  .venv/lib/python3.12/site-packages/litellm/llms/groq/stt/transformation.py  101
1 file changed, 101 insertions(+), 0 deletions(-)
diff --git a/.venv/lib/python3.12/site-packages/litellm/llms/groq/stt/transformation.py b/.venv/lib/python3.12/site-packages/litellm/llms/groq/stt/transformation.py
new file mode 100644
index 00000000..c4dbd8d0
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/litellm/llms/groq/stt/transformation.py
@@ -0,0 +1,101 @@
+"""
+Translate from OpenAI's `/v1/audio/transcriptions` to Groq's `/v1/audio/transcriptions`
+"""
+
+import types
+from typing import List, Optional, Union
+
+import litellm
+
+
+class GroqSTTConfig:
+
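+    # OpenAI-style request params; most are chat-completion params and only
+    # a few apply to speech-to-text (see get_supported_openai_params_stt).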
+    frequency_penalty: Optional[float] = None
+    function_call: Optional[Union[str, dict]] = None
+    functions: Optional[list] = None
+    logit_bias: Optional[dict] = None
+    max_tokens: Optional[int] = None
+    n: Optional[int] = None
+    presence_penalty: Optional[float] = None
+    stop: Optional[Union[str, list]] = None
+    temperature: Optional[float] = None
+    top_p: Optional[float] = None
+    response_format: Optional[dict] = None
+    tools: Optional[list] = None
+    tool_choice: Optional[Union[str, dict]] = None
+
+    def __init__(
+        self,
+        frequency_penalty: Optional[float] = None,
+        function_call: Optional[Union[str, dict]] = None,
+        functions: Optional[list] = None,
+        logit_bias: Optional[dict] = None,
+        max_tokens: Optional[int] = None,
+        n: Optional[int] = None,
+        presence_penalty: Optional[float] = None,
+        stop: Optional[Union[str, list]] = None,
+        temperature: Optional[float] = None,
+        top_p: Optional[float] = None,
+        response_format: Optional[dict] = None,
+        tools: Optional[list] = None,
+        tool_choice: Optional[Union[str, dict]] = None,
+    ) -> None:
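+        # litellm config pattern: every explicitly passed (non-None) argument
+        # is stored as a *class* attribute so get_config() can read it back.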
+        locals_ = locals().copy()
+        for key, value in locals_.items():
+            if key != "self" and value is not None:
+                setattr(self.__class__, key, value)
+
+    @classmethod
+    def get_config(cls):
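+        # Return every class-level config value, skipping dunder attributes,
+        # functions/descriptors, and unset (None) entries.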
+        return {
+            k: v
+            for k, v in cls.__dict__.items()
+            if not k.startswith("__")
+            and not isinstance(
+                v,
+                (
+                    types.FunctionType,
+                    types.BuiltinFunctionType,
+                    classmethod,
+                    staticmethod,
+                ),
+            )
+            and v is not None
+        }
+
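+    # OpenAI /v1/audio/transcriptions params that this config passes through
+    # to Groq's transcription endpoint.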
+    def get_supported_openai_params_stt(self):
+        return [
+            "prompt",
+            "response_format",
+            "temperature",
+            "language",
+        ]
+
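+    # Subset of OpenAI's response_format values accepted here (OpenAI also
+    # supports "srt" and "vtt", which are not in this list).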
+    def get_supported_openai_response_formats_stt(self) -> List[str]:
+        return ["json", "verbose_json", "text"]
+
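+    # Copy each supported param into optional_params. An unsupported
+    # response_format is silently dropped when drop_params (or the global
+    # litellm.drop_params) is set; otherwise it raises UnsupportedParamsError.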
+    def map_openai_params_stt(
+        self,
+        non_default_params: dict,
+        optional_params: dict,
+        model: str,
+        drop_params: bool,
+    ) -> dict:
+        response_formats = self.get_supported_openai_response_formats_stt()
+        for param, value in non_default_params.items():
+            if param == "response_format":
+                if value in response_formats:
+                    optional_params[param] = value
+                else:
+                    if litellm.drop_params is True or drop_params is True:
+                        pass
+                    else:
+                        raise litellm.utils.UnsupportedParamsError(
+                            message="Groq doesn't support response_format={}. To drop unsupported openai params from the call, set `litellm.drop_params = True`".format(
+                                value
+                            ),
+                            status_code=400,
+                        )
+            else:
+                optional_params[param] = value
+        return optional_params
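
A minimal usage sketch of the mapper above, assuming GroqSTTConfig is instantiated and called directly (litellm normally invokes it internally during request transformation); the model name "whisper-large-v3" is illustrative only:

from litellm.llms.groq.stt.transformation import GroqSTTConfig

config = GroqSTTConfig()

# Supported values pass straight through into optional_params.
params = config.map_openai_params_stt(
    non_default_params={"response_format": "verbose_json", "temperature": 0.2},
    optional_params={},
    model="whisper-large-v3",
    drop_params=False,
)
assert params == {"response_format": "verbose_json", "temperature": 0.2}

# An unsupported response_format is dropped when drop_params=True;
# with drop_params=False it would raise litellm.utils.UnsupportedParamsError.
params = config.map_openai_params_stt(
    non_default_params={"response_format": "srt"},
    optional_params={},
    model="whisper-large-v3",
    drop_params=True,
)
assert params == {}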