Add .env file for environment variables and update docker-compose.yml to use it

- Introduced a new .env file containing sensitive environment variables and configuration settings.
- Updated docker-compose.yml to reference the new .env file for environment variables instead of stack.env.
- Adjusted volume paths and network configurations for improved clarity and organization.
- Commented out legacy model configurations in model_config.yaml for future reference.
commit ecf85dce4a (parent 2dd2525621)
2026-01-13 23:43:41 +00:00
3 changed files with 91 additions and 84 deletions
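
A quick way to confirm that Compose actually resolves the new .env (a minimal sketch, assuming Compose v2 and that docker-compose.yml and .env sit in the same project directory):

# Render the fully interpolated config; ${DATABASE_URL}, ${LITELLM_MASTER_KEY}, etc.
# should show their values from .env instead of empty strings.
docker compose config

# Recreate the container so it re-reads env_file after the switch from stack.env to .env.
docker compose up -d --force-recreate litellm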

.env (new file)

@@ -0,0 +1,27 @@
# GENERIC_CLIENT_ID = "******"
# GENERIC_CLIENT_SECRET = "G*******"
# GENERIC_AUTHORIZATION_ENDPOINT = "http://localhost:9090/auth"
# GENERIC_TOKEN_ENDPOINT = "http://localhost:9090/token"
# GENERIC_USERINFO_ENDPOINT = "http://localhost:9090/me"
# redirect_uri = "http://localhost:4000/sso/callback"
# user_id = PROXY_ADMIN_ID
DATABASE_URL=postgresql://vasceannie:squirtle123456@192.168.50.210:5432/sandbox?schema=pvgo7i70amd9xkx
LANGFUSE_SECRET_KEY=sk-lf-a623bd79-35ab-4d45-a513-f044bc40ee36
LANGFUSE_PUBLIC_KEY=pk-lf-316bfcaf-ff23-4832-b4ab-009b34607bb3
LANGFUSE_BASE_URL=http://fuse.lab
UI_USERNAME=vasceannie
UI_PASSWORD=$$QUirtle123
LITELLM_MASTER_KEY=sk-1234
LITELLM_SALT_KEY=4brKp9QKqdv6fXGeaU9mtWrqs6orQ_h9Et3GFpA9Xew
STORE_MODEL_IN_DB=True
ANTHROPIC_API_KEY=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
OPENAI_API_KEY=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
GEMINI_API_KEY=AIzaSyAWpWkSQaCpOJlBklt73Bei-bjiaiaork8
DEEPGRAM_API_KEY=dc04258e36b211efc498efdfe4732840e96b6ed0
DEEPSEEK_API_KEY=sk-607e220f1cc4425ca94473c0f3e2e05b
ELEVENLABS_API_KEY=sk_9a8e3153ba1e3133a4d87cd0dafa723e24d997ab3a0b8870
FIREWORKS_AI_API_KEY=fw_3ZGskTSRRq5yYGnLbJxyGpTS
JINA_AI_API_KEY=jina_ab9d001a9dfb44228012d787a1b567edIMW1T2Itgr3UqnEYvAmddTtTvIkg
COHERE_API_KEY=Zx9TS3woEdUrFNpJv7ysM7yN3Bm85Wpq0KTdCUKp
PERPLEXITYAI_API_KEY=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
USE_PRISMA_MIGRATE=True
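
To double-check what the running container actually receives from this file (a sketch, assuming the litellm image ships a shell with a standard env binary):

# Print the container environment and filter for the keys defined above.
docker compose exec litellm env | grep -E 'DATABASE_URL|LITELLM|LANGFUSE'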

model_config.yaml

@@ -1,31 +1,31 @@
-model_list:
-  - model_name: "*"
-    litellm_params:
-      model: "*"
-  - model_name: "openai/*"
-    litellm_params:
-      model: "openai/*"
-      api_key: os.environ/OPENAI_API_KEY
-    litellm_settings:
-      check_provider_endpoint: true
-  - model_name: "anthropic/*"
-    litellm_params:
-      model: "anthropic/*"
-      api_key: os.environ/ANTHROPIC_API_KEY
-    litellm_settings:
-      check_provider_endpoint: true
-  - model_name: "gemini/*"
-    litellm_params:
-      model: "gemini/*"
-      api_key: os.environ/GEMINI_API_KEY
-    litellm_settings:
-      check_provider_endpoint: true
-  - model_name: nova-2
-    litellm_params:
-      model: deepgram/nova-2
-      api_key: os.environ/DEEPGRAM_API_KEY
-    model_info:
-      mode: audio_transcription
+# model_list:
+#   - model_name: "*"
+#     litellm_params:
+#       model: "*"
+#   - model_name: "openai/*"
+#     litellm_params:
+#       model: "openai/*"
+#       api_key: os.environ/OPENAI_API_KEY
+#     litellm_settings:
+#       check_provider_endpoint: true
+#   - model_name: "anthropic/*"
+#     litellm_params:
+#       model: "anthropic/*"
+#       api_key: os.environ/ANTHROPIC_API_KEY
+#     litellm_settings:
+#       check_provider_endpoint: true
+#   - model_name: "gemini/*"
+#     litellm_params:
+#       model: "gemini/*"
+#       api_key: os.environ/GEMINI_API_KEY
+#     litellm_settings:
+#       check_provider_endpoint: true
+#   - model_name: nova-2
+#     litellm_params:
+#       model: deepgram/nova-2
+#       api_key: os.environ/DEEPGRAM_API_KEY
+#     model_info:
+#       mode: audio_transcription
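
With STORE_MODEL_IN_DB=True in the new .env, models can be registered through the proxy's admin API instead of the commented-out model_list above. A hedged sketch: the /model/new endpoint and payload shape follow the LiteLLM proxy docs as I understand them, and the base URL assumes the caller can reach the container on port 4000:

# Register one model in the database-backed model list (names are illustrative).
curl -sS http://litellm:4000/model/new \
  -H "Authorization: Bearer ${LITELLM_MASTER_KEY}" \
  -H "Content-Type: application/json" \
  -d '{"model_name": "gpt-4o", "litellm_params": {"model": "openai/gpt-4o", "api_key": "os.environ/OPENAI_API_KEY"}}'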

docker-compose.yml

@@ -2,42 +2,44 @@ version: '3.2'
 services:
   litellm:
+    container_name: litellm
     image: litellm/litellm:latest
     restart: unless-stopped
     env_file:
-      - stack.env
+      - .env
     volumes:
-      - /home/trav/dkr/litellm/config/config.yaml:/app/config.yaml
-      - /home/trav/dkr/litellm/config/model_config.yaml:/app/model_config.yaml
-      - /home/trav/dkr/litellm/config/litellm_settings.yaml:/app/litellm_settings.yaml
-      - /home/trav/dkr/litellm/config/router_settings.yaml:/app/router_settings.yaml
-      - /home/trav/dkr/litellm/config/general_settings.yaml:/app/general_settings.yaml
-      # - ./config/config.yaml:/app/config.yaml
-      # - ./config/model_config.yaml:/app/model_config.yaml
-      # - ./config/litellm_settings.yaml:/app/litellm_settings.yaml
-      # - ./config/router_settings.yaml:/app/router_settings.yaml
-      # - ./config/general_settings.yaml:/app/general_settings.yaml
+      # - /home/trav/dkr/litellm/config/config.yaml:/app/config.yaml
+      # - /home/trav/dkr/litellm/config/model_config.yaml:/app/model_config.yaml
+      # - /home/trav/dkr/litellm/config/litellm_settings.yaml:/app/litellm_settings.yaml
+      # - /home/trav/dkr/litellm/config/router_settings.yaml:/app/router_settings.yaml
+      # - /home/trav/dkr/litellm/config/general_settings.yaml:/app/general_settings.yaml
+      - ./config/config.yaml:/app/config.yaml
+      - ./config/model_config.yaml:/app/model_config.yaml
+      - ./config/litellm_settings.yaml:/app/litellm_settings.yaml
+      - ./config/router_settings.yaml:/app/router_settings.yaml
+      - ./config/general_settings.yaml:/app/general_settings.yaml
    command:
       - "--config=/app/config.yaml"
-    # environment:
-    # DATABASE_URL: ${DATABASE_URL}
-    # LITELLM_MASTER_KEY: ${LITELLM_MASTER_KEY}
-    # LITELLM_SALT_KEY: ${LITELLM_SALT_KEY}
-    # UI_USERNAME: ${UI_USERNAME}
-    # UI_PASSWORD: ${UI_PASSWORD}
-    # STORE_MODEL_IN_DB: "True"
-    # # Provider Keys
-    # OPENAI_API_KEY: ${OPENAI_API_KEY}
-    # COHERE_API_KEY: ${COHERE_API_KEY}
-    # ANTHROPIC_API_KEY: ${ANTHROPIC_API_KEY}
-    # LITELLM_LOG: ${LITELLM_LOG:-WARN}
-    # LANGFUSE_PUBLIC_KEY: ${LANGFUSE_PUBLIC_KEY}
-    # LANGFUSE_SECRET_KEY: ${LANGFUSE_SECRET_KEY}
-    # LANGFUSE_OTEL_HOST: ${LANGFUSE_OTEL_HOST}
+    environment:
+      DATABASE_URL: ${DATABASE_URL}
+      LITELLM_MASTER_KEY: ${LITELLM_MASTER_KEY}
+      LITELLM_SALT_KEY: ${LITELLM_SALT_KEY}
+      UI_USERNAME: ${UI_USERNAME}
+      UI_PASSWORD: ${UI_PASSWORD}
+      STORE_MODEL_IN_DB: "True"
+      # Provider Keys
+      OPENAI_API_KEY: ${OPENAI_API_KEY}
+      COHERE_API_KEY: ${COHERE_API_KEY}
+      ANTHROPIC_API_KEY: ${ANTHROPIC_API_KEY}
+      LITELLM_LOG: ${LITELLM_LOG:-WARN}
+      LANGFUSE_PUBLIC_KEY: ${LANGFUSE_PUBLIC_KEY}
+      LANGFUSE_SECRET_KEY: ${LANGFUSE_SECRET_KEY}
+      LANGFUSE_BASE_URL: ${LANGFUSE_OTEL_HOST}
+      LANGFUSE_OTEL_HOST: ${LANGFUSE_BASE_URL}
+      USE_PRISMA_MIGRATE: ${USE_PRISMA_MIGRATE}
     networks:
-      - net
-      - badge-net
-      - public
+      - llm-net
+      - proxy-little
     expose:
       - 4000
     healthcheck:
@@ -47,32 +49,10 @@ services:
       interval: 30s
       timeout: 10s
       retries: 3
-    deploy:
-      replicas: 1
-      update_config:
-        parallelism: 1
-        delay: 10s
-        failure_action: rollback
-      placement:
-        constraints:
-          - node.hostname == little
-      labels:
-        - "traefik.enable=true"
-        - "traefik.swarm.network=public"
-        - "traefik.http.routers.litellm.entrypoints=web"
-        - "traefik.http.routers.litellm.rule=Host(`llm.lab`) || Host(`llm.toy`)"
-        - "traefik.http.routers.litellm.service=litellm"
-        - "traefik.http.services.litellm.loadbalancer.server.port=4000"
-        # Health check using unauthenticated endpoint
-        - "traefik.http.services.litellm.loadbalancer.healthcheck.path=/health/liveliness"
-        - "traefik.http.services.litellm.loadbalancer.healthcheck.interval=30s"
-        - "traefik.http.services.litellm.loadbalancer.healthcheck.timeout=5s"

 networks:
-  net:
-    driver: overlay
-    attachable: true
-  public:
+  llm-net:
+    driver: bridge
+    name: llm-net
+  proxy-little:
     external: true
-  badge-net:
-    external: true
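
With the Traefik health-check labels removed, liveness can still be verified by hand from a throwaway container attached to llm-net (a sketch; the curlimages/curl image and the litellm hostname are assumptions based on the container_name and network name above):

# Hit the unauthenticated liveliness endpoint over the shared bridge network.
docker run --rm --network llm-net curlimages/curl -fsS http://litellm:4000/health/liveliness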