Diffstat (limited to '.venv/lib/python3.12/site-packages/litellm/llms/deepseek/cost_calculator.py')
-rw-r--r-- .venv/lib/python3.12/site-packages/litellm/llms/deepseek/cost_calculator.py | 21
1 file changed, 21 insertions(+), 0 deletions(-)
diff --git a/.venv/lib/python3.12/site-packages/litellm/llms/deepseek/cost_calculator.py b/.venv/lib/python3.12/site-packages/litellm/llms/deepseek/cost_calculator.py
new file mode 100644
index 00000000..0f4490cb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/litellm/llms/deepseek/cost_calculator.py
@@ -0,0 +1,21 @@
+"""
+Cost calculator for DeepSeek Chat models.
+
+Handles the prompt caching scenario.
+"""
+
+from typing import Tuple
+
+from litellm.litellm_core_utils.llm_cost_calc.utils import generic_cost_per_token
+from litellm.types.utils import Usage
+
+
+def cost_per_token(model: str, usage: Usage) -> Tuple[float, float]:
+ """
+ Calculates the cost per token for a given model, prompt tokens, and completion tokens.
+
+ Follows the same logic as Anthropic's cost per token calculation.
+ """
+ return generic_cost_per_token(
+ model=model, usage=usage, custom_llm_provider="deepseek"
+ )
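
For context, a minimal usage sketch (not part of the diff) of how this helper might be called. The model name and token counts below are illustrative assumptions, not values taken from the change itself:

    from litellm.types.utils import Usage
    from litellm.llms.deepseek.cost_calculator import cost_per_token

    # Hypothetical token counts for illustration only.
    usage = Usage(prompt_tokens=1200, completion_tokens=300, total_tokens=1500)

    # Returns (prompt_cost, completion_cost) in USD, computed from litellm's
    # pricing map via generic_cost_per_token with custom_llm_provider="deepseek".
    prompt_cost, completion_cost = cost_per_token(model="deepseek-chat", usage=usage)
    print(prompt_cost, completion_cost)

The function itself is a thin wrapper: all pricing lookup and cache-aware accounting is delegated to generic_cost_per_token, keyed on the "deepseek" provider.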