Diffstat (limited to '.venv/lib/python3.12/site-packages/litellm/proxy/example_config_yaml/load_balancer.yaml')
-rw-r--r--  .venv/lib/python3.12/site-packages/litellm/proxy/example_config_yaml/load_balancer.yaml | 28
1 file changed, 28 insertions(+), 0 deletions(-)
diff --git a/.venv/lib/python3.12/site-packages/litellm/proxy/example_config_yaml/load_balancer.yaml b/.venv/lib/python3.12/site-packages/litellm/proxy/example_config_yaml/load_balancer.yaml
new file mode 100644
index 00000000..502b90ff
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/litellm/proxy/example_config_yaml/load_balancer.yaml
@@ -0,0 +1,28 @@
+litellm_settings:
+  drop_params: True
+
+# Model-specific settings
+model_list: # use the same model_name for using the litellm router. LiteLLM will use the router between gpt-3.5-turbo
+  - model_name: gpt-3.5-turbo # litellm will
+    litellm_params:
+      model: gpt-3.5-turbo
+      api_key: sk-uj6F
+      tpm: 20000  # [OPTIONAL] REPLACE with your openai tpm
+      rpm: 3      # [OPTIONAL] REPLACE with your openai rpm
+  - model_name: gpt-3.5-turbo
+    litellm_params:
+      model: gpt-3.5-turbo
+      api_key: sk-Imn
+      tpm: 20000  # [OPTIONAL] REPLACE with your openai tpm
+      rpm: 3      # [OPTIONAL] REPLACE with your openai rpm
+  - model_name: gpt-3.5-turbo
+    litellm_params:
+      model: openrouter/gpt-3.5-turbo
+  - model_name: mistral-7b-instruct
+    litellm_params:
+      model: mistralai/mistral-7b-instruct
+
+environment_variables:
+  REDIS_HOST: localhost
+  REDIS_PASSWORD:
+  REDIS_PORT:
\ No newline at end of file
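
Because the first three model_list entries share the same model_name (gpt-3.5-turbo), the LiteLLM proxy's router load-balances requests for that name across those deployments, within each entry's tpm/rpm limits. Below is a minimal sketch of calling this config through the proxy; it assumes the proxy was started with "litellm --config load_balancer.yaml" and is reachable at http://0.0.0.0:4000 (the address and the placeholder API key are assumptions, not part of the config above), and it uses the OpenAI Python client.

# Minimal usage sketch (assumptions: proxy started with
# `litellm --config load_balancer.yaml`, listening at http://0.0.0.0:4000).
from openai import OpenAI

client = OpenAI(
    api_key="anything",              # placeholder; the proxy holds the real provider keys
    base_url="http://0.0.0.0:4000",  # assumed proxy address, adjust to your deployment
)

# Requests for "gpt-3.5-turbo" are routed by LiteLLM across the three
# deployments that share that model_name.
response = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hello from the load balancer example"}],
)
print(response.choices[0].message.content)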