diff --git a/.env b/.env new file mode 100644 index 0000000..535e608 --- /dev/null +++ b/.env @@ -0,0 +1,27 @@ +# GENERIC_CLIENT_ID = "******" +# GENERIC_CLIENT_SECRET = "G*******" +# GENERIC_AUTHORIZATION_ENDPOINT = "http://localhost:9090/auth" +# GENERIC_TOKEN_ENDPOINT = "http://localhost:9090/token" +# GENERIC_USERINFO_ENDPOINT = "http://localhost:9090/me" +# redirect_uri = "http://localhost:4000/sso/callback" +# user_id = PROXY_ADMIN_ID +DATABASE_URL=postgresql://vasceannie:squirtle123456@192.168.50.210:5432/sandbox?schema=pvgo7i70amd9xkx +LANGFUSE_SECRET_KEY=sk-lf-a623bd79-35ab-4d45-a513-f044bc40ee36 +LANGFUSE_PUBLIC_KEY=pk-lf-316bfcaf-ff23-4832-b4ab-009b34607bb3 +LANGFUSE_BASE_URL=http://fuse.lab +UI_USERNAME=vasceannie +UI_PASSWORD=$$QUirtle123 +LITELLM_MASTER_KEY=sk-1234 +LITELLM_SALT_KEY=4brKp9QKqdv6fXGeaU9mtWrqs6orQ_h9Et3GFpA9Xew +STORE_MODEL_IN_DB=True +ANTHROPIC_API_KEY=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 +OPENAI_API_KEY=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 +GEMINI_API_KEY=AIzaSyAWpWkSQaCpOJlBklt73Bei-bjiaiaork8 +DEEPGRAM_API_KEY=dc04258e36b211efc498efdfe4732840e96b6ed0 +DEEPSEEK_API_KEY=sk-607e220f1cc4425ca94473c0f3e2e05b +ELEVENLABS_API_KEY=sk_9a8e3153ba1e3133a4d87cd0dafa723e24d997ab3a0b8870 +FIREWORKS_AI_API_KEY=fw_3ZGskTSRRq5yYGnLbJxyGpTS +JINA_AI_API_KEY=jina_ab9d001a9dfb44228012d787a1b567edIMW1T2Itgr3UqnEYvAmddTtTvIkg +COHERE_API_KEY=Zx9TS3woEdUrFNpJv7ysM7yN3Bm85Wpq0KTdCUKp +PERPLEXITYAI_API_KEY=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 +USE_PRISMA_MIGRATE=True \ No newline at end of file diff --git a/config/model_config.yaml b/config/model_config.yaml index 6844aeb..b04c4ff 100644 --- a/config/model_config.yaml +++ b/config/model_config.yaml @@ -1,31 +1,31 @@ -model_list: - - model_name: "*" - litellm_params: - model: "*" - - model_name: 
"openai/*" - litellm_params: - model: "openai/*" - api_key: os.environ/OPENAI_API_KEY - litellm_settings: - check_provider_endpoint: true +# model_list: +# - model_name: "*" +# litellm_params: +# model: "*" +# - model_name: "openai/*" +# litellm_params: +# model: "openai/*" +# api_key: os.environ/OPENAI_API_KEY +# litellm_settings: +# check_provider_endpoint: true - - model_name: "anthropic/*" - litellm_params: - model: "anthropic/*" - api_key: os.environ/ANTHROPIC_API_KEY - litellm_settings: - check_provider_endpoint: true +# - model_name: "anthropic/*" +# litellm_params: +# model: "anthropic/*" +# api_key: os.environ/ANTHROPIC_API_KEY +# litellm_settings: +# check_provider_endpoint: true - - model_name: "gemini/*" - litellm_params: - model: "gemini/*" - api_key: os.environ/GEMINI_API_KEY - litellm_settings: - check_provider_endpoint: true +# - model_name: "gemini/*" +# litellm_params: +# model: "gemini/*" +# api_key: os.environ/GEMINI_API_KEY +# litellm_settings: +# check_provider_endpoint: true - - model_name: nova-2 - litellm_params: - model: deepgram/nova-2 - api_key: os.environ/DEEPGRAM_API_KEY - model_info: - mode: audio_transcription \ No newline at end of file +# - model_name: nova-2 +# litellm_params: +# model: deepgram/nova-2 +# api_key: os.environ/DEEPGRAM_API_KEY +# model_info: +# mode: audio_transcription \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 79a5a84..adbe639 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -2,42 +2,44 @@ version: '3.2' services: litellm: + container_name: litellm image: litellm/litellm:latest restart: unless-stopped env_file: - - stack.env + - .env volumes: - - /home/trav/dkr/litellm/config/config.yaml:/app/config.yaml - - /home/trav/dkr/litellm/config/model_config.yaml:/app/model_config.yaml - - /home/trav/dkr/litellm/config/litellm_settings.yaml:/app/litellm_settings.yaml - - /home/trav/dkr/litellm/config/router_settings.yaml:/app/router_settings.yaml - - 
/home/trav/dkr/litellm/config/general_settings.yaml:/app/general_settings.yaml - # - ./config/config.yaml:/app/config.yaml - # - ./config/model_config.yaml:/app/model_config.yaml - # - ./config/litellm_settings.yaml:/app/litellm_settings.yaml - # - ./config/router_settings.yaml:/app/router_settings.yaml - # - ./config/general_settings.yaml:/app/general_settings.yaml + # - /home/trav/dkr/litellm/config/config.yaml:/app/config.yaml + # - /home/trav/dkr/litellm/config/model_config.yaml:/app/model_config.yaml + # - /home/trav/dkr/litellm/config/litellm_settings.yaml:/app/litellm_settings.yaml + # - /home/trav/dkr/litellm/config/router_settings.yaml:/app/router_settings.yaml + # - /home/trav/dkr/litellm/config/general_settings.yaml:/app/general_settings.yaml + - ./config/config.yaml:/app/config.yaml + - ./config/model_config.yaml:/app/model_config.yaml + - ./config/litellm_settings.yaml:/app/litellm_settings.yaml + - ./config/router_settings.yaml:/app/router_settings.yaml + - ./config/general_settings.yaml:/app/general_settings.yaml command: - "--config=/app/config.yaml" - # environment: - # DATABASE_URL: ${DATABASE_URL} - # LITELLM_MASTER_KEY: ${LITELLM_MASTER_KEY} - # LITELLM_SALT_KEY: ${LITELLM_SALT_KEY} - # UI_USERNAME: ${UI_USERNAME} - # UI_PASSWORD: ${UI_PASSWORD} - # STORE_MODEL_IN_DB: "True" - # # Provider Keys - # OPENAI_API_KEY: ${OPENAI_API_KEY} - # COHERE_API_KEY: ${COHERE_API_KEY} - # ANTHROPIC_API_KEY: ${ANTHROPIC_API_KEY} - # LITELLM_LOG: ${LITELLM_LOG:-WARN} - # LANGFUSE_PUBLIC_KEY: ${LANGFUSE_PUBLIC_KEY} - # LANGFUSE_SECRET_KEY: ${LANGFUSE_SECRET_KEY} - # LANGFUSE_OTEL_HOST: ${LANGFUSE_OTEL_HOST} + environment: + DATABASE_URL: ${DATABASE_URL} + LITELLM_MASTER_KEY: ${LITELLM_MASTER_KEY} + LITELLM_SALT_KEY: ${LITELLM_SALT_KEY} + UI_USERNAME: ${UI_USERNAME} + UI_PASSWORD: ${UI_PASSWORD} + STORE_MODEL_IN_DB: "True" + # Provider Keys + OPENAI_API_KEY: ${OPENAI_API_KEY} + COHERE_API_KEY: ${COHERE_API_KEY} + ANTHROPIC_API_KEY: ${ANTHROPIC_API_KEY} + 
LITELLM_LOG: ${LITELLM_LOG:-WARN} + LANGFUSE_PUBLIC_KEY: ${LANGFUSE_PUBLIC_KEY} + LANGFUSE_SECRET_KEY: ${LANGFUSE_SECRET_KEY} + LANGFUSE_BASE_URL: ${LANGFUSE_BASE_URL} + LANGFUSE_OTEL_HOST: ${LANGFUSE_OTEL_HOST} + USE_PRISMA_MIGRATE: ${USE_PRISMA_MIGRATE} networks: - - net - - badge-net - - public + - llm-net + - proxy-little expose: - 4000 healthcheck: @@ -47,32 +49,10 @@ services: interval: 30s timeout: 10s retries: 3 - deploy: - replicas: 1 - update_config: - parallelism: 1 - delay: 10s - failure_action: rollback - placement: - constraints: - - node.hostname == little - labels: - - "traefik.enable=true" - - "traefik.swarm.network=public" - - "traefik.http.routers.litellm.entrypoints=web" - - "traefik.http.routers.litellm.rule=Host(`llm.lab`) || Host(`llm.toy`)" - - "traefik.http.routers.litellm.service=litellm" - - "traefik.http.services.litellm.loadbalancer.server.port=4000" - # Health check using unauthenticated endpoint - - "traefik.http.services.litellm.loadbalancer.healthcheck.path=/health/liveliness" - - "traefik.http.services.litellm.loadbalancer.healthcheck.interval=30s" - - "traefik.http.services.litellm.loadbalancer.healthcheck.timeout=5s" networks: - net: - driver: overlay - attachable: true - public: - external: true - badge-net: - external: true + llm-net: + driver: bridge + name: llm-net + proxy-little: + external: true \ No newline at end of file