about summary refs log tree commit diff
path: root/.venv/lib/python3.12/site-packages/litellm/llms/vllm/completion/transformation.py
diff options
context:
space:
mode:
Diffstat (limited to '.venv/lib/python3.12/site-packages/litellm/llms/vllm/completion/transformation.py')
-rw-r--r--.venv/lib/python3.12/site-packages/litellm/llms/vllm/completion/transformation.py15
1 files changed, 15 insertions, 0 deletions
diff --git a/.venv/lib/python3.12/site-packages/litellm/llms/vllm/completion/transformation.py b/.venv/lib/python3.12/site-packages/litellm/llms/vllm/completion/transformation.py
new file mode 100644
index 00000000..ec4c07e9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/litellm/llms/vllm/completion/transformation.py
@@ -0,0 +1,15 @@
+"""
+Translates from OpenAI's `/v1/chat/completions` to the VLLM SDK `llm.generate`.
+
+NOT RECOMMENDED FOR PRODUCTION USE. Use `hosted_vllm/` instead.
+"""
+
+from ...hosted_vllm.chat.transformation import HostedVLLMChatConfig
+
+
class VLLMConfig(HostedVLLMChatConfig):
    """
    Transformation config for the local VLLM SDK provider.

    The VLLM SDK accepts the same OpenAI-compatible parameters as the
    `hosted_vllm/` provider, so this class intentionally adds nothing and
    inherits all parameter mapping behavior from HostedVLLMChatConfig.
    """
    # NOTE: the redundant `pass` was removed — the docstring alone is a
    # valid class body; no overrides are needed.