aboutsummaryrefslogtreecommitdiff
path: root/.venv/lib/python3.12/site-packages/litellm/integrations/custom_prompt_management.py
diff options
context:
space:
mode:
authorS. Solomon Darnell2025-03-28 21:52:21 -0500
committerS. Solomon Darnell2025-03-28 21:52:21 -0500
commit4a52a71956a8d46fcb7294ac71734504bb09bcc2 (patch)
treeee3dc5af3b6313e921cd920906356f5d4febc4ed /.venv/lib/python3.12/site-packages/litellm/integrations/custom_prompt_management.py
parentcc961e04ba734dd72309fb548a2f97d67d578813 (diff)
downloadgn-ai-4a52a71956a8d46fcb7294ac71734504bb09bcc2.tar.gz
two version of R2R are hereHEADmaster
Diffstat (limited to '.venv/lib/python3.12/site-packages/litellm/integrations/custom_prompt_management.py')
-rw-r--r--.venv/lib/python3.12/site-packages/litellm/integrations/custom_prompt_management.py49
1 file changed, 49 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/litellm/integrations/custom_prompt_management.py b/.venv/lib/python3.12/site-packages/litellm/integrations/custom_prompt_management.py
new file mode 100644
index 00000000..5b34ef0c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/litellm/integrations/custom_prompt_management.py
@@ -0,0 +1,49 @@
+from typing import List, Optional, Tuple
+
+from litellm.integrations.custom_logger import CustomLogger
+from litellm.integrations.prompt_management_base import (
+ PromptManagementBase,
+ PromptManagementClient,
+)
+from litellm.types.llms.openai import AllMessageValues
+from litellm.types.utils import StandardCallbackDynamicParams
+
+
class CustomPromptManagement(CustomLogger, PromptManagementBase):
    """Extension point for user-defined prompt-management integrations.

    Combines litellm's callback logger (``CustomLogger``) with the
    prompt-management interface (``PromptManagementBase``). The defaults
    below are deliberate no-ops; subclasses override the hooks to fetch
    prompts from an external prompt-management tool.
    """

    @property
    def integration_name(self) -> str:
        """Identifier under which this integration is registered."""
        return "custom-prompt-management"

    def should_run_prompt_management(
        self,
        prompt_id: str,
        dynamic_callback_params: StandardCallbackDynamicParams,
    ) -> bool:
        """Decide whether prompt management applies to this request.

        The base implementation always opts in; subclasses may gate on
        ``prompt_id`` or the dynamic callback params.
        """
        return True

    def get_chat_completion_prompt(
        self,
        model: str,
        messages: List[AllMessageValues],
        non_default_params: dict,
        prompt_id: str,
        prompt_variables: Optional[dict],
        dynamic_callback_params: StandardCallbackDynamicParams,
    ) -> Tuple[str, List[AllMessageValues], dict]:
        """Resolve the (model, messages, params) triple for a chat call.

        The default is a pure pass-through: every input comes back
        unchanged. A subclass may substitute any of the three values
        with data pulled from its prompt-management tool.

        Returns:
            - model: str - the model to use (can be pulled from prompt management tool)
            - messages: List[AllMessageValues] - the messages to use (can be pulled from prompt management tool)
            - non_default_params: dict - update with any optional params (e.g. temperature, max_tokens, etc.) to use (can be pulled from prompt management tool)
        """
        return (model, messages, non_default_params)

    def _compile_prompt_helper(
        self,
        prompt_id: str,
        prompt_variables: Optional[dict],
        dynamic_callback_params: StandardCallbackDynamicParams,
    ) -> PromptManagementClient:
        """Unsupported for custom prompt management; always raises."""
        raise NotImplementedError(
            "Custom prompt management does not support compile prompt helper"
        )