diff --git a/docker-compose.override.yml.example b/docker-compose.override.yml.example
index c34502344..3fa254ff7 100644
--- a/docker-compose.override.yml.example
+++ b/docker-compose.override.yml.example
@@ -122,6 +122,7 @@ version: '3.4'
 #      - ./litellm/litellm-config.yaml:/app/config.yaml
 #    command: [ "--config", "/app/config.yaml", "--port", "8000", "--num_workers", "8" ]
 #    environment:
+#      OPENAI_API_KEY: none ## needs to be set if ollama's openai api compatibility is used
 #      REDIS_HOST: redis
 #      REDIS_PORT: 6379
 #      REDIS_PASSWORD: RedisChangeMe
diff --git a/docs/install/configuration/litellm.md b/docs/install/configuration/litellm.md
index 04fba6c5e..d0f4ca427 100644
--- a/docs/install/configuration/litellm.md
+++ b/docs/install/configuration/litellm.md
@@ -48,13 +48,13 @@ model_list:
       rpm: 1440
   - model_name: mixtral
     litellm_params:
-      model: ollama/mixtral:8x7b-instruct-v0.1-q5_K_M
-      api_base: http://ollama:11434
+      model: openai/mixtral:8x7b-instruct-v0.1-q5_K_M # use openai/* for ollama's openai api compatibility
+      api_base: http://ollama:11434/v1
       stream: True
   - model_name: mistral
     litellm_params:
-      model: ollama/mistral
-      api_base: http://ollama:11434
+      model: openai/mistral # use openai/* for ollama's openai api compatibility
+      api_base: http://ollama:11434/v1
       stream: True
 litellm_settings:
   success_callback: ["langfuse"]
@@ -95,4 +95,4 @@ Key components and features include:
 - **Deployment and Performance**: Information on deploying LiteLLM Proxy and its performance metrics.
 - **Proxy CLI Arguments**: A wide range of command-line arguments for customization.
 
-Overall, LiteLLM Server offers a comprehensive suite of tools for managing, deploying, and interacting with a variety of LLMs, making it a versatile choice for large-scale AI applications.
\ No newline at end of file
+Overall, LiteLLM Server offers a comprehensive suite of tools for managing, deploying, and interacting with a variety of LLMs, making it a versatile choice for large-scale AI applications.
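For reference, a minimal sketch of how a client could exercise one of the models routed through the LiteLLM proxy after this change. The proxy URL (`http://localhost:8000`, matching the `--port` flag in the compose file) and the use of the `openai` Python package (>= 1.0) are assumptions for illustration, not part of the patch:

```python
# Minimal sketch: querying a model served via the LiteLLM proxy.
# Assumes the proxy is reachable at http://localhost:8000 and that
# the `openai` Python package (>= 1.0) is installed.
from openai import OpenAI

client = OpenAI(
    base_url="http://localhost:8000",  # the LiteLLM proxy, not OpenAI itself
    api_key="none",  # placeholder; Ollama's OpenAI-compatible API ignores it
)

response = client.chat.completions.create(
    model="mixtral",  # matches a model_name entry in litellm-config.yaml
    messages=[{"role": "user", "content": "Hello!"}],
)
print(response.choices[0].message.content)
```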