From 09cd1a7e7455b739e9a5dff9fa4d822e369d9c40 Mon Sep 17 00:00:00 2001
From: Marius <90092216+mariusgau@users.noreply.github.com>
Date: Thu, 4 Apr 2024 14:32:36 +0200
Subject: [PATCH] =?UTF-8?q?=F0=9F=A6=99=20docs:=20Update=20Ollama=20+=20Li?=
 =?UTF-8?q?teLLM=20Instructions=20(#2302)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Update litellm.md

* set OPENAI_API_KEY of litellm service (needs to be set if ollama's openai api compatibility is used)
---
 docker-compose.override.yml.example   |  1 +
 docs/install/configuration/litellm.md | 10 +++++-----
 2 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/docker-compose.override.yml.example b/docker-compose.override.yml.example
index c34502344..3fa254ff7 100644
--- a/docker-compose.override.yml.example
+++ b/docker-compose.override.yml.example
@@ -122,6 +122,7 @@ version: '3.4'
 #       - ./litellm/litellm-config.yaml:/app/config.yaml
 #     command: [ "--config", "/app/config.yaml", "--port", "8000", "--num_workers", "8" ]
 #     environment:
+#       OPENAI_API_KEY: none ## needs to be set if ollama's openai api compatibility is used
 #       REDIS_HOST: redis
 #       REDIS_PORT: 6379
 #       REDIS_PASSWORD: RedisChangeMe
diff --git a/docs/install/configuration/litellm.md b/docs/install/configuration/litellm.md
index 04fba6c5e..d0f4ca427 100644
--- a/docs/install/configuration/litellm.md
+++ b/docs/install/configuration/litellm.md
@@ -48,13 +48,13 @@ model_list:
       rpm: 1440
   - model_name: mixtral
     litellm_params:
-      model: ollama/mixtral:8x7b-instruct-v0.1-q5_K_M
-      api_base: http://ollama:11434
+      model: openai/mixtral:8x7b-instruct-v0.1-q5_K_M # use openai/* for ollama's openai api compatibility
+      api_base: http://ollama:11434/v1
       stream: True
   - model_name: mistral
     litellm_params:
-      model: ollama/mistral
-      api_base: http://ollama:11434
+      model: openai/mistral # use openai/* for ollama's openai api compatibility
+      api_base: http://ollama:11434/v1
       stream: True
 litellm_settings:
   success_callback: ["langfuse"]
@@ -95,4 +95,4 @@ Key components and features include:
 - **Deployment and Performance**: Information on deploying LiteLLM Proxy and its performance metrics.
 - **Proxy CLI Arguments**: A wide range of command-line arguments for customization.
 
-Overall, LiteLLM Server offers a comprehensive suite of tools for managing, deploying, and interacting with a variety of LLMs, making it a versatile choice for large-scale AI applications.
\ No newline at end of file
+Overall, LiteLLM Server offers a comprehensive suite of tools for managing, deploying, and interacting with a variety of LLMs, making it a versatile choice for large-scale AI applications.
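
For context, here is a minimal sketch of what the relevant `model_list` entries in `litellm/litellm-config.yaml` look like once this patch is applied. Everything shown is assembled from the diff above (the `openai/*` model prefix, the `/v1` api_base, and the `langfuse` callback); no settings beyond those are implied:

```yaml
# Sketch of the resulting litellm-config.yaml after this patch.
# The openai/* prefix tells LiteLLM to talk to the backend using the
# OpenAI protocol, and /v1 is Ollama's OpenAI-compatible endpoint.
model_list:
  - model_name: mixtral
    litellm_params:
      model: openai/mixtral:8x7b-instruct-v0.1-q5_K_M
      api_base: http://ollama:11434/v1
      stream: True
  - model_name: mistral
    litellm_params:
      model: openai/mistral
      api_base: http://ollama:11434/v1
      stream: True
litellm_settings:
  success_callback: ["langfuse"]
```

This is also why the patch sets `OPENAI_API_KEY` on the litellm service: when LiteLLM routes through its OpenAI-style client, it expects the key variable to be defined even though Ollama itself ignores the value.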
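Once the stack is running, the setup can be smoke-tested by sending an OpenAI-style request through the proxy. A hypothetical sketch, assuming the proxy's port 8000 (from the `--port 8000` compose argument above) is published to the host and that no master key is configured, so any bearer token is accepted:

```sh
# Ask the LiteLLM proxy to route a chat completion to the "mistral"
# model, which it forwards to Ollama's OpenAI-compatible endpoint.
curl http://localhost:8000/v1/chat/completions \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer none" \
  -d '{
        "model": "mistral",
        "messages": [{"role": "user", "content": "Say hello"}]
      }'
```

A successful JSON chat-completion response confirms that the `openai/*` prefix and the `/v1` api_base are wired up correctly.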