Compare commits
docs-crisp...v0.7.1
89 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | cdab1e9cda |  |
|  | 3df4fac118 |  |
|  | 0ae98ff011 |  |
|  | 4d05e5b79a |  |
|  | 199f9f32e6 |  |
|  | f94a782b4f |  |
|  | 738207de50 |  |
|  | c96f067689 |  |
|  | 3bfd185cab |  |
|  | c937b8cd07 |  |
|  | 6db91978ca |  |
|  | 8c22bb1d3d |  |
|  | 5d642d0187 |  |
|  | 4196a86fa9 |  |
|  | e6310c806a |  |
|  | 3d1dec62a4 |  |
|  | de3987cbaf |  |
|  | f406a85633 |  |
|  | 692ce3b346 |  |
|  | 26ea990045 |  |
|  | 265abbc1c8 |  |
|  | 0b7da72be6 |  |
|  | 3c184e9410 |  |
|  | bf4e64ce63 |  |
|  | 9d854dac07 |  |
|  | fce7246ac1 |  |
|  | 2cc580ba52 |  |
|  | d2d9ac0280 |  |
|  | f380f261a5 |  |
|  | 9d137ce42f |  |
|  | 25f92dd1c3 |  |
|  | 9277e2a0c5 |  |
|  | c19dfddd0f |  |
|  | 0fe47cf1f8 |  |
|  | 8e5f1ad575 |  |
|  | f64a2cb0b0 |  |
|  | e4c07eb895 |  |
|  | 2240fee44a |  |
|  | cb64b84846 |  |
|  | cc71125fa1 |  |
|  | 6f0eb35365 |  |
|  | 3411d7a543 |  |
|  | caabab4489 |  |
|  | 0b165260f7 |  |
|  | 334b603247 |  |
|  | 476767355b |  |
|  | e80debb704 |  |
|  | 549026f677 |  |
|  | f6a84887e1 |  |
|  | fb80af05be |  |
|  | cd7f3a51e1 |  |
|  | daa5f43ac6 |  |
|  | d0d8e47ec8 |  |
|  | 09cd1a7e74 |  |
|  | 94950b6e8b |  |
|  | e418edd3dc |  |
|  | e3c236ba3b |  |
|  | 7bd03a6e70 |  |
|  | f146db5c59 |  |
|  | 9922baf7d1 |  |
|  | 09da05afa1 |  |
|  | e66aa280c0 |  |
|  | ed17e17a73 |  |
|  | 30d084e696 |  |
|  | 93af814596 |  |
|  | 1bafe80e78 |  |
|  | 49753a35e5 |  |
|  | 1605ef3793 |  |
|  | 8b3f80fe24 |  |
|  | 038063d4d1 |  |
|  | 5c8b16fbaf |  |
|  | aff219c655 |  |
|  | d07396d308 |  |
|  | cc92597f14 |  |
|  | 4854b39f41 |  |
|  | bb8a40dd98 |  |
|  | 56ea0f9ae7 |  |
|  | 6a6b2e79b0 |  |
|  | bc2a628902 |  |
|  | dec7879cc1 |  |
|  | 0a8118deed |  |
|  | 59a8165379 |  |
|  | 3a1d07136c |  |
|  | a00756c469 |  |
|  | 7945fea0f9 |  |
|  | 84656b9812 |  |
|  | b5d25f5e4f |  |
|  | d4b0af3dba |  |
|  | 57d1f12574 |  |
.env.example (36 changes)

@@ -23,6 +23,13 @@ DOMAIN_SERVER=http://localhost:3080
 NO_INDEX=true

+#===============#
+# JSON Logging #
+#===============#
+
+# Use when process console logs in cloud deployment like GCP/AWS
+CONSOLE_JSON=false
+
 #===============#
 # Debug Logging #
 #===============#

@@ -71,7 +78,7 @@ PROXY=
 #============#

 ANTHROPIC_API_KEY=user_provided
-# ANTHROPIC_MODELS=claude-3-opus-20240229,claude-3-sonnet-20240229,claude-2.1,claude-2,claude-1.2,claude-1,claude-1-100k,claude-instant-1,claude-instant-1-100k
+# ANTHROPIC_MODELS=claude-3-opus-20240229,claude-3-sonnet-20240229,claude-3-haiku-20240307,claude-2.1,claude-2,claude-1.2,claude-1,claude-1-100k,claude-instant-1,claude-instant-1-100k
 # ANTHROPIC_REVERSE_PROXY=

 #============#

@@ -128,7 +135,7 @@ DEBUG_OPENAI=false
 # OPENAI_REVERSE_PROXY=

 # OPENAI_ORGANIZATION=

 #====================#
 # Assistants API #

@@ -141,7 +148,7 @@ ASSISTANTS_API_KEY=user_provided
 #============#
 # OpenRouter #
 #============#

 # !!!Warning: Use the variable above instead of this one. Using this one will override the OpenAI endpoint
 # OPENROUTER_API_KEY=

 #============#

@@ -185,7 +192,7 @@ AZURE_AI_SEARCH_SEARCH_OPTION_SELECT=

 # Google
 #-----------------
-GOOGLE_API_KEY=
+GOOGLE_SEARCH_API_KEY=
 GOOGLE_CSE_ID=

 # SerpAPI

@@ -309,6 +316,9 @@ OPENID_ISSUER=
 OPENID_SESSION_SECRET=
 OPENID_SCOPE="openid profile email"
 OPENID_CALLBACK_URL=/oauth/openid/callback
+OPENID_REQUIRED_ROLE=
+OPENID_REQUIRED_ROLE_TOKEN_KIND=
+OPENID_REQUIRED_ROLE_PARAMETER_PATH=

 OPENID_BUTTON_LABEL=
 OPENID_IMAGE_URL=

@@ -317,15 +327,15 @@ OPENID_IMAGE_URL=
 # Email Password Reset #
 #========================#

 EMAIL_SERVICE=
 EMAIL_HOST=
 EMAIL_PORT=25
 EMAIL_ENCRYPTION=
 EMAIL_ENCRYPTION_HOSTNAME=
 EMAIL_ALLOW_SELFSIGNED=
 EMAIL_USERNAME=
 EMAIL_PASSWORD=
 EMAIL_FROM_NAME=
 EMAIL_FROM=noreply@librechat.ai

 #========================#
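The new CONSOLE_JSON flag enables structured console logs for cloud log collectors such as GCP and AWS. Purely as an illustration (this is not LibreChat's actual logger wiring), a boolean env flag like this is typically consumed along these lines:

```js
// Illustrative only: toggle JSON vs. plain console output from an env flag.
const winston = require('winston');

const useJson = (process.env.CONSOLE_JSON ?? 'false').toLowerCase() === 'true';
const logger = winston.createLogger({
  transports: [
    new winston.transports.Console({
      format: useJson ? winston.format.json() : winston.format.simple(),
    }),
  ],
});

logger.info('server started', { port: 3080 });
```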
.github/ISSUE_TEMPLATE/BUG-REPORT.yml (2 changes)

@@ -50,7 +50,7 @@ body:
     id: terms
     attributes:
       label: Code of Conduct
-      description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/danny-avila/LibreChat/blob/main/CODE_OF_CONDUCT.md)
+      description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/danny-avila/LibreChat/blob/main/.github/CODE_OF_CONDUCT.md)
     options:
       - label: I agree to follow this project's Code of Conduct
         required: true
.github/workflows/backend-review.yml (5 changes)

@@ -51,6 +51,9 @@ jobs:
          exit 1
        fi

+    - name: Prepare .env.test file
+      run: cp api/test/.env.test.example api/test/.env.test
+
     - name: Run unit tests
       run: cd api && npm run test:ci

@@ -60,4 +63,4 @@ jobs:
     - name: Run linters
       uses: wearerequired/lint-action@v2
       with:
         eslint: true
.github/workflows/container.yml (83 changes, file deleted)

@@ -1,83 +0,0 @@
-name: Docker Compose Build on Tag
-
-# The workflow is triggered when a tag is pushed
-on:
-  push:
-    tags:
-      - "*"
-
-jobs:
-  build:
-    runs-on: ubuntu-latest
-
-    steps:
-      # Check out the repository
-      - name: Checkout
-        uses: actions/checkout@v4
-
-      # Set up Docker
-      - name: Set up Docker
-        uses: docker/setup-buildx-action@v3
-
-      # Set up QEMU for cross-platform builds
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3
-
-      # Log in to GitHub Container Registry
-      - name: Log in to GitHub Container Registry
-        uses: docker/login-action@v2
-        with:
-          registry: ghcr.io
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      # Prepare Docker Build
-      - name: Build Docker images
-        run: |
-          cp .env.example .env
-
-      # Tag and push librechat-api
-      - name: Docker metadata for librechat-api
-        id: meta-librechat-api
-        uses: docker/metadata-action@v5
-        with:
-          images: |
-            ghcr.io/${{ github.repository_owner }}/librechat-api
-          tags: |
-            type=raw,value=latest
-            type=semver,pattern={{version}}
-            type=semver,pattern={{major}}
-            type=semver,pattern={{major}}.{{minor}}
-
-      - name: Build and librechat-api
-        uses: docker/build-push-action@v5
-        with:
-          file: Dockerfile.multi
-          context: .
-          push: true
-          tags: ${{ steps.meta-librechat-api.outputs.tags }}
-          platforms: linux/amd64,linux/arm64
-          target: api-build
-
-      # Tag and push librechat
-      - name: Docker metadata for librechat
-        id: meta-librechat
-        uses: docker/metadata-action@v5
-        with:
-          images: |
-            ghcr.io/${{ github.repository_owner }}/librechat
-          tags: |
-            type=raw,value=latest
-            type=semver,pattern={{version}}
-            type=semver,pattern={{major}}
-            type=semver,pattern={{major}}.{{minor}}
-
-      - name: Build and librechat
-        uses: docker/build-push-action@v5
-        with:
-          file: Dockerfile
-          context: .
-          push: true
-          tags: ${{ steps.meta-librechat.outputs.tags }}
-          platforms: linux/amd64,linux/arm64
-          target: node
.github/workflows/latest-images-main.yml (88 changes, file deleted)

@@ -1,88 +0,0 @@
-name: Docker Compose Build Latest Tag (Manual Dispatch)
-
-# The workflow is manually triggered
-on:
-  workflow_dispatch:
-
-jobs:
-  build:
-    runs-on: ubuntu-latest
-
-    steps:
-      # Check out the repository
-      - name: Checkout
-        uses: actions/checkout@v4
-
-      # Fetch all tags and set the latest tag
-      - name: Fetch tags and set the latest tag
-        run: |
-          git fetch --tags
-          echo "LATEST_TAG=$(git describe --tags `git rev-list --tags --max-count=1`)" >> $GITHUB_ENV
-
-      # Set up Docker
-      - name: Set up Docker
-        uses: docker/setup-buildx-action@v3
-
-      # Set up QEMU
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3
-
-      # Log in to GitHub Container Registry
-      - name: Log in to GitHub Container Registry
-        uses: docker/login-action@v2
-        with:
-          registry: ghcr.io
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      # Prepare Docker Build
-      - name: Build Docker images
-        run: cp .env.example .env
-
-      # Docker metadata for librechat-api
-      - name: Docker metadata for librechat-api
-        id: meta-librechat-api
-        uses: docker/metadata-action@v5
-        with:
-          images: ghcr.io/${{ github.repository_owner }}/librechat-api
-          tags: |
-            type=raw,value=${{ env.LATEST_TAG }},enable=true
-            type=raw,value=latest,enable=true
-            type=semver,pattern={{version}}
-            type=semver,pattern={{major}}
-            type=semver,pattern={{major}}.{{minor}}
-
-      # Build and push librechat-api
-      - name: Build and push librechat-api
-        uses: docker/build-push-action@v5
-        with:
-          file: Dockerfile.multi
-          context: .
-          push: true
-          tags: ${{ steps.meta-librechat-api.outputs.tags }}
-          platforms: linux/amd64,linux/arm64
-          target: api-build
-
-      # Docker metadata for librechat
-      - name: Docker metadata for librechat
-        id: meta-librechat
-        uses: docker/metadata-action@v5
-        with:
-          images: ghcr.io/${{ github.repository_owner }}/librechat
-          tags: |
-            type=raw,value=${{ env.LATEST_TAG }},enable=true
-            type=raw,value=latest,enable=true
-            type=semver,pattern={{version}}
-            type=semver,pattern={{major}}
-            type=semver,pattern={{major}}.{{minor}}
-
-      # Build and push librechat
-      - name: Build and push librechat
-        uses: docker/build-push-action@v5
-        with:
-          file: Dockerfile
-          context: .
-          push: true
-          tags: ${{ steps.meta-librechat.outputs.tags }}
-          platforms: linux/amd64,linux/arm64
-          target: node
.github/workflows/main-image-workflow.yml (56 changes)

@@ -1,12 +1,20 @@
 name: Docker Compose Build Latest Main Image Tag (Manual Dispatch)

 # The workflow is manually triggered
 on:
   workflow_dispatch:

 jobs:
   build:
     runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        include:
+          - target: api-build
+            file: Dockerfile.multi
+            image_name: librechat-api
+          - target: node
+            file: Dockerfile
+            image_name: librechat

     steps:
       - name: Checkout

@@ -17,12 +25,15 @@ jobs:
          git fetch --tags
          echo "LATEST_TAG=$(git describe --tags `git rev-list --tags --max-count=1`)" >> $GITHUB_ENV

-      - name: Set up Docker
-        uses: docker/setup-buildx-action@v3
+      # Set up QEMU
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3

+      # Set up Docker Buildx
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3

       # Log in to GitHub Container Registry
       - name: Log in to GitHub Container Registry
         uses: docker/login-action@v2
         with:

@@ -30,26 +41,29 @@ jobs:
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}

-      # Docker metadata for librechat
-      - name: Docker metadata for librechat
-        id: meta-librechat
-        uses: docker/metadata-action@v5
+      # Login to Docker Hub
+      - name: Login to Docker Hub
+        uses: docker/login-action@v3
         with:
-          images: ghcr.io/${{ github.repository_owner }}/librechat
-          tags: |
-            type=raw,value=${{ env.LATEST_TAG }},enable=true
-            type=raw,value=latest,enable=true
-            type=semver,pattern={{version}}
-            type=semver,pattern={{major}}
-            type=semver,pattern={{major}}.{{minor}}
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}

-      # Build and push librechat with only linux/amd64 platform
-      - name: Build and push librechat
+      # Prepare the environment
+      - name: Prepare environment
+        run: |
+          cp .env.example .env
+
+      # Build and push Docker images for each target
+      - name: Build and push Docker images
         uses: docker/build-push-action@v5
         with:
-          file: Dockerfile
           context: .
+          file: ${{ matrix.file }}
           push: true
-          tags: ${{ steps.meta-librechat.outputs.tags }}
-          platforms: linux/amd64
-          target: node
+          tags: |
+            ghcr.io/${{ github.repository_owner }}/${{ matrix.image_name }}:${{ env.LATEST_TAG }}
+            ghcr.io/${{ github.repository_owner }}/${{ matrix.image_name }}:latest
+            ${{ secrets.DOCKERHUB_USERNAME }}/${{ matrix.image_name }}:${{ env.LATEST_TAG }}
+            ${{ secrets.DOCKERHUB_USERNAME }}/${{ matrix.image_name }}:latest
+          platforms: linux/amd64,linux/arm64
+          target: ${{ matrix.target }}
.github/workflows/tag-images.yml (67 changes, new file)

@@ -0,0 +1,67 @@
+name: Docker Images Build on Tag
+
+on:
+  push:
+    tags:
+      - '*'
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        include:
+          - target: api-build
+            file: Dockerfile.multi
+            image_name: librechat-api
+          - target: node
+            file: Dockerfile
+            image_name: librechat
+
+    steps:
+      # Check out the repository
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      # Set up QEMU
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+
+      # Set up Docker Buildx
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      # Log in to GitHub Container Registry
+      - name: Log in to GitHub Container Registry
+        uses: docker/login-action@v2
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      # Login to Docker Hub
+      - name: Login to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      # Prepare the environment
+      - name: Prepare environment
+        run: |
+          cp .env.example .env
+
+      # Build and push Docker images for each target
+      - name: Build and push Docker images
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          file: ${{ matrix.file }}
+          push: true
+          tags: |
+            ghcr.io/${{ github.repository_owner }}/${{ matrix.image_name }}:${{ github.ref_name }}
+            ghcr.io/${{ github.repository_owner }}/${{ matrix.image_name }}:latest
+            ${{ secrets.DOCKERHUB_USERNAME }}/${{ matrix.image_name }}:${{ github.ref_name }}
+            ${{ secrets.DOCKERHUB_USERNAME }}/${{ matrix.image_name }}:latest
+          platforms: linux/amd64,linux/arm64
+          target: ${{ matrix.target }}
.gitignore (6 changes)

@@ -50,6 +50,7 @@ bower_components/
 #config file
 librechat.yaml
+librechat.yml

 # Environment
 .npmrc

@@ -92,4 +93,7 @@ auth.json
 !client/src/components/Nav/SettingsTabs/Data/

 # User uploads
 uploads/
+
+# owner
+release/
@@ -1,4 +1,4 @@
 #!/usr/bin/env sh
 set -e
 . "$(dirname -- "$0")/_/husky.sh"
 [ -n "$CI" ] && exit 0
Dockerfile (12 changes)

@@ -1,5 +1,7 @@
+# v0.7.1
+
 # Base node image
-FROM node:18-alpine AS node
+FROM node:18-alpine3.18 AS node

 RUN apk add g++ make py3-pip
 RUN npm install -g node-gyp

@@ -15,13 +17,19 @@ COPY --chown=node:node . .
 # Allow mounting of these files, which have no default
 # values.
 RUN touch .env
-RUN npm config set fetch-retry-maxtimeout 300000
+RUN npm config set fetch-retry-maxtimeout 600000
+RUN npm config set fetch-retries 5
+RUN npm config set fetch-retry-mintimeout 15000
 RUN npm install --no-audit

 # React client build
+ENV NODE_OPTIONS="--max-old-space-size=2048"
 RUN npm run frontend

+# Create directories for the volumes to inherit
+# the correct permissions
+RUN mkdir -p /app/client/public/images /app/api/logs
+
 # Node API setup
 EXPOSE 3080
 ENV HOST=0.0.0.0
Dockerfile.multi

@@ -1,3 +1,5 @@
+# v0.7.1
+
 # Build API, Client and Data Provider
 FROM node:20-alpine AS base

@@ -11,11 +13,12 @@ RUN npm run build
 # React client build
 FROM data-provider-build AS client-build
 WORKDIR /app/client
-COPY ./client/ ./
+COPY ./client/package*.json ./
 # Copy data-provider to client's node_modules
 RUN mkdir -p /app/client/node_modules/librechat-data-provider/
 RUN cp -R /app/packages/data-provider/* /app/client/node_modules/librechat-data-provider/
 RUN npm install
+COPY ./client/ ./
+ENV NODE_OPTIONS="--max-old-space-size=2048"
 RUN npm run build

@@ -24,6 +27,8 @@ FROM data-provider-build AS api-build
 WORKDIR /app/api
 COPY api/package*.json ./
 COPY api/ ./
+# Copy helper scripts
+COPY config/ ./
 # Copy data-provider to API's node_modules
 RUN mkdir -p /app/api/node_modules/librechat-data-provider/
 RUN cp -R /app/packages/data-provider/* /app/api/node_modules/librechat-data-provider/
README.md (14 changes)

@@ -27,7 +27,7 @@
 </p>

 <p align="center">
-  <a href="https://railway.app/template/b5k2mn?referralCode=HI9hWz">
+  <a href="https://railway.app/template/b5k2mn?referralCode=myKrVZ">
     <img src="https://railway.app/button.svg" alt="Deploy on Railway" height="30">
   </a>
   <a href="https://zeabur.com/templates/0X2ZY8">

@@ -42,9 +42,11 @@
 - 🖥️ UI matching ChatGPT, including Dark mode, Streaming, and latest updates
 - 💬 Multimodal Chat:
-  - Upload and analyze images with GPT-4 and Gemini Vision 📸
-  - General file support now available through the Assistants API integration. 🗃️
-  - Local RAG in Active Development 🚧
+  - Upload and analyze images with Claude 3, GPT-4, and Gemini Vision 📸
+  - Chat with Files using Custom Endpoints, OpenAI, Azure, Anthropic, & Google. 🗃️
+  - Advanced Agents with Files, Code Interpreter, Tools, and API Actions 🔦
+    - Available through the [OpenAI Assistants API](https://platform.openai.com/docs/assistants/overview) 🌤️
+    - Non-OpenAI Agents in Active Development 🚧
 - 🌎 Multilingual UI:
   - English, 中文, Deutsch, Español, Français, Italiano, Polski, Português Brasileiro,
   - Русский, 日本語, Svenska, 한국어, Tiếng Việt, 繁體中文, العربية, Türkçe, Nederlands, עברית

@@ -55,7 +57,9 @@
 - 🔍 Search all messages/conversations
 - 🔌 Plugins, including web access, image generation with DALL-E-3 and more
 - 👥 Multi-User, Secure Authentication with Moderation and Token spend tools
-- ⚙️ Configure Proxy, Reverse Proxy, Docker, many Deployment options, and completely Open-Source
+- ⚙️ Configure Proxy, Reverse Proxy, Docker, & many Deployment options
+- 📖 Completely Open-Source & Built in Public
+- 🧑‍🤝‍🧑 Community-driven development, support, and feedback

 [For a thorough review of our features, see our docs here](https://docs.librechat.ai/features/plugins/introduction.html) 📚
@@ -1,5 +1,6 @@
 require('dotenv').config();
 const { KeyvFile } = require('keyv-file');
+const { EModelEndpoint } = require('librechat-data-provider');
 const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
 const { logger } = require('~/config');

@@ -23,10 +24,7 @@ const askBing = async ({
   let key = null;
   if (expiresAt && isUserProvided) {
-    checkUserKeyExpiry(
-      expiresAt,
-      'Your BingAI Cookies have expired. Please provide your cookies again.',
-    );
+    checkUserKeyExpiry(expiresAt, EModelEndpoint.bingAI);
     key = await getUserKey({ userId, name: 'bingAI' });
   }
@@ -1,6 +1,6 @@
 require('dotenv').config();
 const { KeyvFile } = require('keyv-file');
-const { Constants } = require('librechat-data-provider');
+const { Constants, EModelEndpoint } = require('librechat-data-provider');
 const { getUserKey, checkUserKeyExpiry } = require('../server/services/UserService');

 const browserClient = async ({

@@ -18,10 +18,7 @@ const browserClient = async ({
   let key = null;
   if (expiresAt && isUserProvided) {
-    checkUserKeyExpiry(
-      expiresAt,
-      'Your ChatGPT Access Token has expired. Please provide your token again.',
-    );
+    checkUserKeyExpiry(expiresAt, EModelEndpoint.chatGPTBrowser);
     key = await getUserKey({ userId, name: 'chatGPTBrowser' });
   }
@@ -23,7 +23,7 @@ class BaseClient {
     throw new Error('Method \'setOptions\' must be implemented.');
   }

-  getCompletion() {
+  async getCompletion() {
     throw new Error('Method \'getCompletion\' must be implemented.');
   }
@@ -3,10 +3,13 @@ const crypto = require('crypto');
 const {
   EModelEndpoint,
   resolveHeaders,
+  CohereConstants,
   mapModelToAzureConfig,
 } = require('librechat-data-provider');
+const { CohereClient } = require('cohere-ai');
 const { encoding_for_model: encodingForModel, get_encoding: getEncoding } = require('tiktoken');
 const { fetchEventSource } = require('@waylaidwanderer/fetch-event-source');
+const { createCoherePayload } = require('./llm');
 const { Agent, ProxyAgent } = require('undici');
 const BaseClient = require('./BaseClient');
 const { logger } = require('~/config');

@@ -147,7 +150,8 @@ class ChatGPTClient extends BaseClient {
     return tokenizer;
   }

-  async getCompletion(input, onProgress, abortController = null) {
+  /** @type {getCompletion} */
+  async getCompletion(input, onProgress, onTokenProgress, abortController = null) {
     if (!abortController) {
       abortController = new AbortController();
     }

@@ -305,6 +309,11 @@ class ChatGPTClient extends BaseClient {
       });
     }

+    if (baseURL.startsWith(CohereConstants.API_URL)) {
+      const payload = createCoherePayload({ modelOptions });
+      return await this.cohereChatCompletion({ payload, onTokenProgress });
+    }
+
     if (baseURL.includes('v1') && !baseURL.includes('/completions') && !this.isChatCompletion) {
       baseURL = baseURL.split('v1')[0] + 'v1/completions';
     } else if (

@@ -408,6 +417,35 @@ class ChatGPTClient extends BaseClient {
     return response.json();
   }

+  /** @type {cohereChatCompletion} */
+  async cohereChatCompletion({ payload, onTokenProgress }) {
+    const cohere = new CohereClient({
+      token: this.apiKey,
+      environment: this.completionsUrl,
+    });
+
+    if (!payload.stream) {
+      const chatResponse = await cohere.chat(payload);
+      return chatResponse.text;
+    }
+
+    const chatStream = await cohere.chatStream(payload);
+    let reply = '';
+    for await (const message of chatStream) {
+      if (!message) {
+        continue;
+      }
+
+      if (message.eventType === 'text-generation' && message.text) {
+        onTokenProgress(message.text);
+      } else if (message.eventType === 'stream-end' && message.response) {
+        reply = message.response.text;
+      }
+    }
+
+    return reply;
+  }
+
   async generateTitle(userMessage, botMessage) {
     const instructionsPayload = {
       role: 'system',
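For reference, a minimal standalone sketch of the streaming pattern the new cohereChatCompletion method relies on; the event names and SDK calls mirror the diff above, while the wrapper function and its arguments are illustrative:

```js
// Illustrative wrapper around the cohere-ai SDK's chat stream, following the
// cohereChatCompletion method added above. Partial text arrives as
// 'text-generation' events; the final assembled reply arrives on 'stream-end'.
const { CohereClient } = require('cohere-ai');

async function streamCohereReply(apiKey, payload, onTokenProgress) {
  const cohere = new CohereClient({ token: apiKey });
  const chatStream = await cohere.chatStream(payload);
  let reply = '';
  for await (const message of chatStream) {
    if (message.eventType === 'text-generation' && message.text) {
      onTokenProgress(message.text); // stream partial tokens to the caller
    } else if (message.eventType === 'stream-end' && message.response) {
      reply = message.response.text; // final, fully assembled reply
    }
  }
  return reply;
}
```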
@@ -1,7 +1,9 @@
 const { google } = require('googleapis');
 const { Agent, ProxyAgent } = require('undici');
-const { GoogleVertexAI } = require('langchain/llms/googlevertexai');
+const { ChatVertexAI } = require('@langchain/google-vertexai');
 const { ChatGoogleGenerativeAI } = require('@langchain/google-genai');
+const { GoogleGenerativeAI: GenAI } = require('@google/generative-ai');
+const { GoogleVertexAI } = require('@langchain/community/llms/googlevertexai');
 const { ChatGoogleVertexAI } = require('langchain/chat_models/googlevertexai');
 const { AIMessage, HumanMessage, SystemMessage } = require('langchain/schema');
 const { encoding_for_model: encodingForModel, get_encoding: getEncoding } = require('tiktoken');

@@ -10,6 +12,7 @@ const {
   getResponseSender,
   endpointSettings,
   EModelEndpoint,
+  VisionModes,
   AuthKeys,
 } = require('librechat-data-provider');
 const { encodeAndFormat } = require('~/server/services/Files/images');

@@ -126,7 +129,7 @@ class GoogleClient extends BaseClient {
     this.options.attachments?.then((attachments) => this.checkVisionRequest(attachments));

-    // TODO: as of 12/14/23, only gemini models are "Generative AI" models provided by Google
+    /** @type {boolean} Whether using a "GenerativeAI" Model */
     this.isGenerativeModel = this.modelOptions.model.includes('gemini');
     const { isGenerativeModel } = this;
     this.isChatModel = !isGenerativeModel && this.modelOptions.model.includes('chat');

@@ -234,7 +237,7 @@ class GoogleClient extends BaseClient {
       this.isVisionModel = true;
     }

-    if (this.isVisionModel && !attachments) {
+    if (this.isVisionModel && !attachments && this.modelOptions.model.includes('gemini-pro')) {
       this.modelOptions.model = 'gemini-pro';
       this.isVisionModel = false;
     }

@@ -247,6 +250,40 @@ class GoogleClient extends BaseClient {
     })).bind(this);
   }

+  /**
+   * Formats messages for generative AI
+   * @param {TMessage[]} messages
+   * @returns
+   */
+  async formatGenerativeMessages(messages) {
+    const formattedMessages = [];
+    const attachments = await this.options.attachments;
+    const latestMessage = { ...messages[messages.length - 1] };
+    const files = await this.addImageURLs(latestMessage, attachments, VisionModes.generative);
+    this.options.attachments = files;
+    messages[messages.length - 1] = latestMessage;
+
+    for (const _message of messages) {
+      const role = _message.isCreatedByUser ? this.userLabel : this.modelLabel;
+      const parts = [];
+      parts.push({ text: _message.text });
+      if (!_message.image_urls?.length) {
+        formattedMessages.push({ role, parts });
+        continue;
+      }
+
+      for (const images of _message.image_urls) {
+        if (images.inlineData) {
+          parts.push({ inlineData: images.inlineData });
+        }
+      }
+
+      formattedMessages.push({ role, parts });
+    }
+
+    return formattedMessages;
+  }
+
   /**
    *
    * Adds image URLs to the message object and returns the files

@@ -255,17 +292,23 @@ class GoogleClient extends BaseClient {
    * @param {MongoFile[]} files
    * @returns {Promise<MongoFile[]>}
    */
-  async addImageURLs(message, attachments) {
+  async addImageURLs(message, attachments, mode = '') {
     const { files, image_urls } = await encodeAndFormat(
       this.options.req,
       attachments,
       EModelEndpoint.google,
+      mode,
     );
     message.image_urls = image_urls.length ? image_urls : undefined;
     return files;
   }

-  async buildVisionMessages(messages = [], parentMessageId) {
+  /**
+   * Builds the augmented prompt for attachments
+   * TODO: Add File API Support
+   * @param {TMessage[]} messages
+   */
+  async buildAugmentedPrompt(messages = []) {
     const attachments = await this.options.attachments;
     const latestMessage = { ...messages[messages.length - 1] };
     this.contextHandlers = createContextHandlers(this.options.req, latestMessage.text);

@@ -281,6 +324,12 @@ class GoogleClient extends BaseClient {
       this.augmentedPrompt = await this.contextHandlers.createContext();
       this.options.promptPrefix = this.augmentedPrompt + this.options.promptPrefix;
     }
+  }
+
+  async buildVisionMessages(messages = [], parentMessageId) {
+    const attachments = await this.options.attachments;
+    const latestMessage = { ...messages[messages.length - 1] };
+    await this.buildAugmentedPrompt(messages);

     const { prompt } = await this.buildMessagesPrompt(messages, parentMessageId);

@@ -301,15 +350,26 @@ class GoogleClient extends BaseClient {
     return { prompt: payload };
   }

+  /** @param {TMessage[]} [messages=[]] */
+  async buildGenerativeMessages(messages = []) {
+    this.userLabel = 'user';
+    this.modelLabel = 'model';
+    const promises = [];
+    promises.push(await this.formatGenerativeMessages(messages));
+    promises.push(this.buildAugmentedPrompt(messages));
+    const [formattedMessages] = await Promise.all(promises);
+    return { prompt: formattedMessages };
+  }
+
   async buildMessages(messages = [], parentMessageId) {
     if (!this.isGenerativeModel && !this.project_id) {
       throw new Error(
         '[GoogleClient] a Service Account JSON Key is required for PaLM 2 and Codey models (Vertex AI)',
       );
+    } else if (this.isGenerativeModel && (!this.apiKey || this.apiKey === 'user_provided')) {
+      throw new Error(
+        '[GoogleClient] an API Key is required for Gemini models (Generative Language API)',
+      );
     }

+    if (!this.project_id && this.modelOptions.model.includes('1.5')) {
+      return await this.buildGenerativeMessages(messages);
+    }
+
     if (this.options.attachments && this.isGenerativeModel) {

@@ -526,13 +586,24 @@ class GoogleClient extends BaseClient {
   }

   createLLM(clientOptions) {
-    if (this.isGenerativeModel) {
-      return new ChatGoogleGenerativeAI({ ...clientOptions, apiKey: this.apiKey });
+    const model = clientOptions.modelName ?? clientOptions.model;
+    if (this.project_id && this.isTextModel) {
+      return new GoogleVertexAI(clientOptions);
+    } else if (this.project_id && this.isChatModel) {
+      return new ChatGoogleVertexAI(clientOptions);
+    } else if (this.project_id) {
+      return new ChatVertexAI(clientOptions);
+    } else if (model.includes('1.5')) {
+      return new GenAI(this.apiKey).getGenerativeModel(
+        {
+          ...clientOptions,
+          model,
+        },
+        { apiVersion: 'v1beta' },
+      );
     }

-    return this.isTextModel
-      ? new GoogleVertexAI(clientOptions)
-      : new ChatGoogleVertexAI(clientOptions);
+    return new ChatGoogleGenerativeAI({ ...clientOptions, apiKey: this.apiKey });
   }

   async getCompletion(_payload, options = {}) {

@@ -544,7 +615,7 @@
     let clientOptions = { ...parameters, maxRetries: 2 };

-    if (!this.isGenerativeModel) {
+    if (this.project_id) {
       clientOptions['authOptions'] = {
         credentials: {
           ...this.serviceKey,

@@ -557,7 +628,7 @@ class GoogleClient extends BaseClient {
       clientOptions = { ...clientOptions, ...this.modelOptions };
     }

-    if (this.isGenerativeModel) {
+    if (this.isGenerativeModel && !this.project_id) {
       clientOptions.modelName = clientOptions.model;
       delete clientOptions.model;
     }

@@ -588,16 +659,46 @@ class GoogleClient extends BaseClient {
       messages.unshift(new SystemMessage(context));
     }

+    const modelName = clientOptions.modelName ?? clientOptions.model ?? '';
+    if (modelName?.includes('1.5') && !this.project_id) {
+      /** @type {GenerativeModel} */
+      const client = model;
+      const requestOptions = {
+        contents: _payload,
+      };
+
+      if (this.options?.promptPrefix?.length) {
+        requestOptions.systemInstruction = {
+          parts: [
+            {
+              text: this.options.promptPrefix,
+            },
+          ],
+        };
+      }
+
+      const result = await client.generateContentStream(requestOptions);
+      for await (const chunk of result.stream) {
+        const chunkText = chunk.text();
+        this.generateTextStream(chunkText, onProgress, {
+          delay: 12,
+        });
+        reply += chunkText;
+      }
+      return reply;
+    }
+
     const stream = await model.stream(messages, {
       signal: abortController.signal,
       timeout: 7000,
     });

     for await (const chunk of stream) {
-      await this.generateTextStream(chunk?.content ?? chunk, onProgress, {
+      const chunkText = chunk?.content ?? chunk;
+      this.generateTextStream(chunkText, onProgress, {
         delay: this.isGenerativeModel ? 12 : 8,
       });
-      reply += chunk?.content ?? chunk;
+      reply += chunkText;
     }

     return reply;
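A condensed sketch of the new Gemini path through createLLM and getCompletion, using only the @google/generative-ai calls that appear in the diff; the model name and the wrapper function are illustrative:

```js
// Illustrative: stream a Gemini 1.5 response via @google/generative-ai, as the
// project_id-less branch above does. generateContentStream yields chunks whose
// text is read with chunk.text().
const { GoogleGenerativeAI } = require('@google/generative-ai');

async function streamGemini(apiKey, contents, onChunk) {
  const client = new GoogleGenerativeAI(apiKey).getGenerativeModel(
    { model: 'gemini-1.5-pro-latest' }, // model name is an assumption
    { apiVersion: 'v1beta' },
  );
  let reply = '';
  const result = await client.generateContentStream({ contents });
  for await (const chunk of result.stream) {
    const chunkText = chunk.text();
    onChunk(chunkText);
    reply += chunkText;
  }
  return reply;
}
```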
@@ -5,6 +5,7 @@ const {
   EModelEndpoint,
   resolveHeaders,
   ImageDetailCost,
+  CohereConstants,
   getResponseSender,
   validateVisionModel,
   mapModelToAzureConfig,

@@ -16,7 +17,13 @@ const {
   getModelMaxTokens,
   genAzureChatCompletion,
 } = require('~/utils');
-const { truncateText, formatMessage, createContextHandlers, CUT_OFF_PROMPT } = require('./prompts');
+const {
+  truncateText,
+  formatMessage,
+  createContextHandlers,
+  CUT_OFF_PROMPT,
+  titleInstruction,
+} = require('./prompts');
 const { encodeAndFormat } = require('~/server/services/Files/images/encode');
 const { handleOpenAIErrors } = require('./tools/util');
 const spendTokens = require('~/models/spendTokens');

@@ -39,7 +46,10 @@ class OpenAIClient extends BaseClient {
     super(apiKey, options);
     this.ChatGPTClient = new ChatGPTClient();
     this.buildPrompt = this.ChatGPTClient.buildPrompt.bind(this);
+    /** @type {getCompletion} */
     this.getCompletion = this.ChatGPTClient.getCompletion.bind(this);
+    /** @type {cohereChatCompletion} */
+    this.cohereChatCompletion = this.ChatGPTClient.cohereChatCompletion.bind(this);
     this.contextStrategy = options.contextStrategy
       ? options.contextStrategy.toLowerCase()
       : 'discard';

@@ -48,6 +58,9 @@ class OpenAIClient extends BaseClient {
     this.azure = options.azure || false;
     this.setOptions(options);
     this.metadata = {};
+
+    /** @type {string | undefined} - The API Completions URL */
+    this.completionsUrl;
   }

   // TODO: PluginsClient calls this 3x, unneeded

@@ -533,6 +546,7 @@ class OpenAIClient extends BaseClient {
     return result;
   }

+  /** @type {sendCompletion} */
   async sendCompletion(payload, opts = {}) {
     let reply = '';
     let result = null;

@@ -541,7 +555,7 @@ class OpenAIClient extends BaseClient {
     const invalidBaseUrl = this.completionsUrl && extractBaseURL(this.completionsUrl) === null;
     const useOldMethod = !!(invalidBaseUrl || !this.isChatCompletion || typeof Bun !== 'undefined');
     if (typeof opts.onProgress === 'function' && useOldMethod) {
-      await this.getCompletion(
+      const completionResult = await this.getCompletion(
         payload,
         (progressMessage) => {
           if (progressMessage === '[DONE]') {

@@ -574,8 +588,13 @@ class OpenAIClient extends BaseClient {
           opts.onProgress(token);
           reply += token;
         },
+        opts.onProgress,
         opts.abortController || new AbortController(),
       );
+
+      if (completionResult && typeof completionResult === 'string') {
+        reply = completionResult;
+      }
     } else if (typeof opts.onProgress === 'function' || this.options.useChatCompletion) {
       reply = await this.chatCompletion({
         payload,

@@ -586,9 +605,14 @@ class OpenAIClient extends BaseClient {
       result = await this.getCompletion(
         payload,
         null,
+        opts.onProgress,
         opts.abortController || new AbortController(),
       );
+
+      if (result && typeof result === 'string') {
+        return result.trim();
+      }

       logger.debug('[OpenAIClient] sendCompletion: result', result);

       if (this.isChatCompletion) {

@@ -760,8 +784,7 @@ class OpenAIClient extends BaseClient {
     const instructionsPayload = [
       {
         role: 'system',
-        content: `Detect user language and write in the same language an extremely concise title for this conversation, which you must accurately detect.
-Write in the detected language. Title in 5 Words or Less. No Punctuation or Quotation. Do not mention the language. All first letters of every word should be capitalized and write the title in User Language only.
+        content: `Please generate ${titleInstruction}

 ${convo}

@@ -769,10 +792,18 @@ ${convo}
       },
     ];

+    const promptTokens = this.getTokenCountForMessage(instructionsPayload[0]);
+
     try {
+      let useChatCompletion = true;
+      if (this.options.reverseProxyUrl === CohereConstants.API_URL) {
+        useChatCompletion = false;
+      }
       title = (
-        await this.sendPayload(instructionsPayload, { modelOptions, useChatCompletion: true })
+        await this.sendPayload(instructionsPayload, { modelOptions, useChatCompletion })
       ).replaceAll('"', '');
+      const completionTokens = this.getTokenCount(title);
+      this.recordTokenUsage({ promptTokens, completionTokens, context: 'title' });
     } catch (e) {
       logger.error(
         '[OpenAIClient] There was an issue generating the title with the completion method',

@@ -924,12 +955,12 @@ ${convo}
     }
   }

-  async recordTokenUsage({ promptTokens, completionTokens }) {
+  async recordTokenUsage({ promptTokens, completionTokens, context = 'message' }) {
     await spendTokens(
       {
+        context,
         user: this.user,
         model: this.modelOptions.model,
-        context: 'message',
         conversationId: this.conversationId,
         endpointTokenConfig: this.options.endpointTokenConfig,
       },
@@ -244,7 +244,7 @@ class PluginsClient extends OpenAIClient {
       this.setOptions(opts);
       return super.sendMessage(message, opts);
     }
-    logger.debug('[PluginsClient] sendMessage', { message, opts });
+    logger.debug('[PluginsClient] sendMessage', { userMessageText: message, opts });
     const {
       user,
       isEdited,
api/app/clients/llm/createCoherePayload.js (85 changes, new file)

@@ -0,0 +1,85 @@
+const { CohereConstants } = require('librechat-data-provider');
+const { titleInstruction } = require('../prompts/titlePrompts');
+
+// Mapping OpenAI roles to Cohere roles
+const roleMap = {
+  user: CohereConstants.ROLE_USER,
+  assistant: CohereConstants.ROLE_CHATBOT,
+  system: CohereConstants.ROLE_SYSTEM, // Recognize and map the system role explicitly
+};
+
+/**
+ * Adjusts an OpenAI ChatCompletionPayload to conform with Cohere's expected chat payload format.
+ * Now includes handling for "system" roles explicitly mentioned.
+ *
+ * @param {Object} options - Object containing the model options.
+ * @param {ChatCompletionPayload} options.modelOptions - The OpenAI model payload options.
+ * @returns {CohereChatStreamRequest} Cohere-compatible chat API payload.
+ */
+function createCoherePayload({ modelOptions }) {
+  /** @type {string | undefined} */
+  let preamble;
+  let latestUserMessageContent = '';
+  const {
+    stream,
+    stop,
+    top_p,
+    temperature,
+    frequency_penalty,
+    presence_penalty,
+    max_tokens,
+    messages,
+    model,
+    ...rest
+  } = modelOptions;
+
+  // Filter out the latest user message and transform remaining messages to Cohere's chat_history format
+  let chatHistory = messages.reduce((acc, message, index, arr) => {
+    const isLastUserMessage = index === arr.length - 1 && message.role === 'user';
+
+    const messageContent =
+      typeof message.content === 'string'
+        ? message.content
+        : message.content.map((part) => (part.type === 'text' ? part.text : '')).join(' ');
+
+    if (isLastUserMessage) {
+      latestUserMessageContent = messageContent;
+    } else {
+      acc.push({
+        role: roleMap[message.role] || CohereConstants.ROLE_USER,
+        message: messageContent,
+      });
+    }
+
+    return acc;
+  }, []);
+
+  if (
+    chatHistory.length === 1 &&
+    chatHistory[0].role === CohereConstants.ROLE_SYSTEM &&
+    !latestUserMessageContent.length
+  ) {
+    const message = chatHistory[0].message;
+    latestUserMessageContent = message.includes(titleInstruction)
+      ? CohereConstants.TITLE_MESSAGE
+      : '.';
+    preamble = message;
+  }
+
+  return {
+    message: latestUserMessageContent,
+    model: model,
+    chatHistory,
+    stream: stream ?? false,
+    temperature: temperature,
+    frequencyPenalty: frequency_penalty,
+    presencePenalty: presence_penalty,
+    maxTokens: max_tokens,
+    stopSequences: stop,
+    preamble,
+    p: top_p,
+    ...rest,
+  };
+}
+
+module.exports = createCoherePayload;
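A quick usage sketch for the new helper; the model name and messages are hypothetical inputs, and the expected output shape follows the function body above:

```js
// Hypothetical OpenAI-style payload run through createCoherePayload: the last
// user message becomes `message`, earlier turns become `chatHistory`, and
// snake_case sampling options are renamed to Cohere's camelCase fields.
const createCoherePayload = require('./createCoherePayload');

const coherePayload = createCoherePayload({
  modelOptions: {
    model: 'command-r', // illustrative model name
    stream: true,
    temperature: 0.7,
    max_tokens: 512,
    messages: [
      { role: 'system', content: 'You are a helpful assistant.' },
      { role: 'user', content: 'Summarize the release notes.' },
    ],
  },
});
// coherePayload.message        -> 'Summarize the release notes.'
// coherePayload.chatHistory[0] -> { role: CohereConstants.ROLE_SYSTEM, message: 'You are a helpful assistant.' }
// coherePayload.maxTokens      -> 512
```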
@@ -1,7 +1,9 @@
 const createLLM = require('./createLLM');
 const RunManager = require('./RunManager');
+const createCoherePayload = require('./createCoherePayload');

 module.exports = {
   createLLM,
   RunManager,
+  createCoherePayload,
 };
@@ -1,5 +1,6 @@
 const axios = require('axios');
 const { isEnabled } = require('~/server/utils');
+const { logger } = require('~/config');

 const footer = `Use the context as your learned knowledge to better answer the user.

@@ -55,7 +56,7 @@ function createContextHandlers(req, userMessageContent) {
         processedFiles.push(file);
         processedIds.add(file.file_id);
       } catch (error) {
-        console.error(`Error processing file ${file.filename}:`, error);
+        logger.error(`Error processing file ${file.filename}:`, error);
       }
     }
   };

@@ -144,8 +145,8 @@ function createContextHandlers(req, userMessageContent) {
       return prompt;
     } catch (error) {
-      console.error('Error creating context:', error);
-      throw error; // Re-throw the error to propagate it to the caller
+      logger.error('Error creating context:', error);
+      throw error;
     }
   };
@@ -13,7 +13,7 @@ module.exports = {
   ...handleInputs,
   ...instructions,
   ...titlePrompts,
-  truncateText,
+  ...truncateText,
   createVisionPrompt,
   createContextHandlers,
 };
@@ -27,6 +27,8 @@ ${convo}`,
   return titlePrompt;
 };

+const titleInstruction =
+  'a concise, 5-word-or-less title for the conversation, using its same language, with no punctuation. Apply title case conventions appropriate for the language. For English, use AP Stylebook Title Case. Never directly mention the language name or the word "title"';
+
 const titleFunctionPrompt = `In this environment you have access to a set of tools you can use to generate the conversation title.

 You may call them like this:

@@ -51,7 +53,7 @@ Submit a brief title in the conversation's language, following the parameter description:
       <parameter>
         <name>title</name>
         <type>string</type>
-        <description>A concise, 5-word-or-less title for the conversation, using its same language, with no punctuation. Apply title case conventions appropriate for the language. For English, use AP Stylebook Title Case. Never directly mention the language name or the word "title"</description>
+        <description>${titleInstruction}</description>
       </parameter>
     </parameters>
   </tool_description>

@@ -80,6 +82,7 @@ function parseTitleFromPrompt(prompt) {

 module.exports = {
   langPrompt,
+  titleInstruction,
   createTitlePrompt,
   titleFunctionPrompt,
   parseTitleFromPrompt,
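For context, a small sketch (module path assumed) of how the exported constant is consumed, mirroring the OpenAIClient.titleConvo change shown earlier:

```js
// The shared titleInstruction string now feeds both the plain system prompt
// (OpenAIClient) and the function-calling prompt's <description> above.
const { titleInstruction } = require('./titlePrompts');

const convo = 'User: hello\nAssistant: hi there';
const instructionsPayload = [
  { role: 'system', content: `Please generate ${titleInstruction}\n\n${convo}` },
];
```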
@@ -1,10 +1,40 @@
 const MAX_CHAR = 255;

-function truncateText(text) {
-  if (text.length > MAX_CHAR) {
-    return `${text.slice(0, MAX_CHAR)}... [text truncated for brevity]`;
+/**
+ * Truncates a given text to a specified maximum length, appending ellipsis and a notification
+ * if the original text exceeds the maximum length.
+ *
+ * @param {string} text - The text to be truncated.
+ * @param {number} [maxLength=MAX_CHAR] - The maximum length of the text after truncation. Defaults to MAX_CHAR.
+ * @returns {string} The truncated text if the original text length exceeds maxLength, otherwise returns the original text.
+ */
+function truncateText(text, maxLength = MAX_CHAR) {
+  if (text.length > maxLength) {
+    return `${text.slice(0, maxLength)}... [text truncated for brevity]`;
   }
   return text;
 }

-module.exports = truncateText;
+/**
+ * Truncates a given text to a specified maximum length by showing the first half and the last half of the text,
+ * separated by ellipsis. This method ensures the output does not exceed the maximum length, including the addition
+ * of ellipsis and notification if the original text exceeds the maximum length.
+ *
+ * @param {string} text - The text to be truncated.
+ * @param {number} [maxLength=MAX_CHAR] - The maximum length of the output text after truncation. Defaults to MAX_CHAR.
+ * @returns {string} The truncated text showing the first half and the last half, or the original text if it does not exceed maxLength.
+ */
+function smartTruncateText(text, maxLength = MAX_CHAR) {
+  const ellipsis = '...';
+  const notification = ' [text truncated for brevity]';
+  const halfMaxLength = Math.floor((maxLength - ellipsis.length - notification.length) / 2);
+
+  if (text.length > maxLength) {
+    const startLastHalf = text.length - halfMaxLength;
+    return `${text.slice(0, halfMaxLength)}${ellipsis}${text.slice(startLastHalf)}${notification}`;
+  }
+
+  return text;
+}
+
+module.exports = { truncateText, smartTruncateText };
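A short usage illustration of the two exports (module path assumed):

```js
// truncateText keeps only the head of an overlong string; smartTruncateText
// keeps the head and tail. With maxLength = 50, the half-length works out to
// floor((50 - 3 - 29) / 2) = 9 characters per side.
const { truncateText, smartTruncateText } = require('./truncateText');

const long = 'a'.repeat(300) + 'Z';
console.log(truncateText(long, 50));
// -> 50 'a's followed by '... [text truncated for brevity]'
console.log(smartTruncateText(long, 50));
// -> 'aaaaaaaaa...aaaaaaaaZ [text truncated for brevity]' (first 9 chars, ellipsis, last 9 chars)
```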
@@ -24,7 +24,7 @@
       "description": "This is your Google Custom Search Engine ID. For instructions on how to obtain this, see <a href='https://github.com/danny-avila/LibreChat/blob/main/docs/features/plugins/google_search.md'>Our Docs</a>."
     },
     {
-      "authField": "GOOGLE_API_KEY",
+      "authField": "GOOGLE_SEARCH_API_KEY",
       "label": "Google API Key",
       "description": "This is your Google Custom Search API Key. For instructions on how to obtain this, see <a href='https://github.com/danny-avila/LibreChat/blob/main/docs/features/plugins/google_search.md'>Our Docs</a>."
     }

@@ -60,7 +60,7 @@
     "name": "CodeSherpa",
     "pluginKey": "codesherpa_tools",
     "description": "[Experimental] A REPL for your chat. Requires https://github.com/iamgreggarcia/codesherpa",
-    "icon": "https://github.com/iamgreggarcia/codesherpa/blob/main/localserver/_logo.png",
+    "icon": "https://raw.githubusercontent.com/iamgreggarcia/codesherpa/main/localserver/_logo.png",
     "authConfig": [
       {
         "authField": "CODESHERPA_SERVER_URL",
@@ -12,14 +12,15 @@ const { logger } = require('~/config');
 class DALLE3 extends Tool {
   constructor(fields = {}) {
     super();
-    /* Used to initialize the Tool without necessary variables. */
+    /** @type {boolean} Used to initialize the Tool without necessary variables. */
     this.override = fields.override ?? false;
-    /* Necessary for output to contain all image metadata. */
+    /** @type {boolean} Necessary for output to contain all image metadata. */
     this.returnMetadata = fields.returnMetadata ?? false;

     this.userId = fields.userId;
     this.fileStrategy = fields.fileStrategy;
     if (fields.processFileURL) {
+      /** @type {processFileURL} Necessary for output to contain all image metadata. */
       this.processFileURL = fields.processFileURL.bind(this);
     }

@@ -43,6 +44,7 @@ class DALLE3 extends Tool {
       config.httpAgent = new HttpsProxyAgent(process.env.PROXY);
     }

+    /** @type {OpenAI} */
     this.openai = new OpenAI(config);
     this.name = 'dalle';
     this.description = `Use DALLE to create images from text descriptions.

@@ -164,13 +166,7 @@ Error Message: ${error.message}`;
     });

     if (this.returnMetadata) {
-      this.result = {
-        file_id: result.file_id,
-        filename: result.filename,
-        filepath: result.filepath,
-        height: result.height,
-        width: result.width,
-      };
+      this.result = result;
     } else {
       this.result = this.wrapInMarkdown(result.filepath);
     }
@@ -9,7 +9,7 @@ class GoogleSearchResults extends Tool {
   constructor(fields = {}) {
     super(fields);
-    this.envVarApiKey = 'GOOGLE_API_KEY';
+    this.envVarApiKey = 'GOOGLE_SEARCH_API_KEY';
     this.envVarSearchEngineId = 'GOOGLE_CSE_ID';
     this.override = fields.override ?? false;
     this.apiKey = fields.apiKey ?? getEnvironmentVariable(this.envVarApiKey);
@@ -4,14 +4,27 @@ const { z } = require('zod');
 const path = require('path');
 const axios = require('axios');
 const sharp = require('sharp');
+const { v4: uuidv4 } = require('uuid');
 const { StructuredTool } = require('langchain/tools');
+const { FileContext } = require('librechat-data-provider');
+const paths = require('~/config/paths');
 const { logger } = require('~/config');

 class StableDiffusionAPI extends StructuredTool {
   constructor(fields) {
     super();
-    /* Used to initialize the Tool without necessary variables. */
+    /** @type {string} User ID */
+    this.userId = fields.userId;
+    /** @type {Express.Request | undefined} Express Request object, only provided by ToolService */
+    this.req = fields.req;
+    /** @type {boolean} Used to initialize the Tool without necessary variables. */
     this.override = fields.override ?? false;
+    /** @type {boolean} Necessary for output to contain all image metadata. */
+    this.returnMetadata = fields.returnMetadata ?? false;
+    if (fields.uploadImageBuffer) {
+      /** @type {uploadImageBuffer} Necessary for output to contain all image metadata. */
+      this.uploadImageBuffer = fields.uploadImageBuffer.bind(this);
+    }

     this.name = 'stable-diffusion';
     this.url = fields.SD_WEBUI_URL || this.getServerURL();

@@ -47,7 +60,7 @@ class StableDiffusionAPI extends StructuredTool {
   getMarkdownImageUrl(imageName) {
     const imageUrl = path
-      .join(this.relativeImageUrl, imageName)
+      .join(this.relativePath, this.userId, imageName)
       .replace(/\\/g, '/')
       .replace('public/', '');
     return `![generated image](/${imageUrl})`;

@@ -73,46 +86,67 @@ class StableDiffusionAPI extends StructuredTool {
       width: 1024,
       height: 1024,
     };
-    const response = await axios.post(`${url}/sdapi/v1/txt2img`, payload);
-    const image = response.data.images[0];
-    const pngPayload = { image: `data:image/png;base64,${image}` };
-    const response2 = await axios.post(`${url}/sdapi/v1/png-info`, pngPayload);
-    const info = response2.data.info;
+    const generationResponse = await axios.post(`${url}/sdapi/v1/txt2img`, payload);
+    const image = generationResponse.data.images[0];

-    // Generate unique name
-    const imageName = `${Date.now()}.png`;
-    this.outputPath = path.resolve(
-      __dirname,
-      '..',
-      '..',
-      '..',
-      '..',
-      '..',
-      'client',
-      'public',
-      'images',
-    );
-    const appRoot = path.resolve(__dirname, '..', '..', '..', '..', '..', 'client');
-    this.relativeImageUrl = path.relative(appRoot, this.outputPath);
+    /** @type {{ height: number, width: number, seed: number, infotexts: string[] }} */
+    let info = {};
+    try {
+      info = JSON.parse(generationResponse.data.info);
+    } catch (error) {
+      logger.error('[StableDiffusion] Error while getting image metadata:', error);
+    }

-    // Check if directory exists, if not create it
-    if (!fs.existsSync(this.outputPath)) {
-      fs.mkdirSync(this.outputPath, { recursive: true });
+    const file_id = uuidv4();
+    const imageName = `${file_id}.png`;
+    const { imageOutput: imageOutputPath, clientPath } = paths;
+    const filepath = path.join(imageOutputPath, this.userId, imageName);
+    this.relativePath = path.relative(clientPath, imageOutputPath);
+
+    if (!fs.existsSync(path.join(imageOutputPath, this.userId))) {
+      fs.mkdirSync(path.join(imageOutputPath, this.userId), { recursive: true });
     }

     try {
       const buffer = Buffer.from(image.split(',', 1)[0], 'base64');
+      if (this.returnMetadata && this.uploadImageBuffer && this.req) {
+        const file = await this.uploadImageBuffer({
+          req: this.req,
+          context: FileContext.image_generation,
+          resize: false,
+          metadata: {
+            buffer,
+            height: info.height,
+            width: info.width,
+            bytes: Buffer.byteLength(buffer),
+            filename: imageName,
+            type: 'image/png',
+            file_id,
+          },
+        });
+
+        const generationInfo = info.infotexts[0].split('\n').pop();
+        return {
+          ...file,
+          prompt,
+          metadata: {
+            negative_prompt,
+            seed: info.seed,
+            info: generationInfo,
+          },
+        };
+      }
+
       await sharp(buffer)
         .withMetadata({
           iptcpng: {
-            parameters: info,
+            parameters: info.infotexts[0],
           },
         })
-        .toFile(this.outputPath + '/' + imageName);
+        .toFile(filepath);
       this.result = this.getMarkdownImageUrl(imageName);
     } catch (error) {
       logger.error('[StableDiffusion] Error while saving the image:', error);
-      // this.result = theImageUrl;
     }

     return this.result;
@@ -237,9 +237,11 @@ const loadTools = async ({
   }

   const imageGenOptions = {
+    req: options.req,
     fileStrategy: options.fileStrategy,
     processFileURL: options.processFileURL,
     returnMetadata: options.returnMetadata,
+    uploadImageBuffer: options.uploadImageBuffer,
   };

   const toolOptions = {
@@ -1,5 +1,6 @@
 const { getUserPluginAuthValue } = require('~/server/services/PluginService');
 const { availableTools } = require('../');
+const { logger } = require('~/config');

 /**
  * Loads a suite of tools with authentication values for a given user, supporting alternate authentication fields.

@@ -30,7 +31,7 @@ const loadToolSuite = async ({ pluginKey, tools, user, options = {} }) => {
         return value;
       }
     } catch (err) {
-      console.error(`Error fetching plugin auth value for ${field}: ${err.message}`);
+      logger.error(`Error fetching plugin auth value for ${field}: ${err.message}`);
     }
   }
   return null;

@@ -41,7 +42,7 @@ const loadToolSuite = async ({ pluginKey, tools, user, options = {} }) => {
     if (authValue !== null) {
       authValues[auth.authField] = authValue;
     } else {
-      console.warn(`No auth value found for ${auth.authField}`);
+      logger.warn(`[loadToolSuite] No auth value found for ${auth.authField}`);
     }
   }
api/cache/banViolation.js
@@ -1,6 +1,7 @@
const Session = require('~/models/Session');
const getLogStores = require('./getLogStores');
const { ViolationTypes } = require('librechat-data-provider');
const { isEnabled, math, removePorts } = require('~/server/utils');
const getLogStores = require('./getLogStores');
const Session = require('~/models/Session');
const { logger } = require('~/config');

const { BAN_VIOLATIONS, BAN_INTERVAL } = process.env ?? {};
@@ -48,7 +49,7 @@ const banViolation = async (req, res, errorMessage) => {
await Session.deleteAllUserSessions(user_id);
res.clearCookie('refreshToken');

const banLogs = getLogStores('ban');
const banLogs = getLogStores(ViolationTypes.BAN);
const duration = errorMessage.duration || banLogs.opts.ttl;

if (duration <= 0) {
api/cache/banViolation.spec.js
@@ -6,6 +6,7 @@ jest.mock('../models/Session');
jest.mock('./getLogStores', () => {
return jest.fn().mockImplementation(() => {
const EventEmitter = require('events');
const { CacheKeys } = require('librechat-data-provider');
const math = require('../server/utils/math');
const mockGet = jest.fn();
const mockSet = jest.fn();
@@ -33,7 +34,7 @@ jest.mock('./getLogStores', () => {
}

return new KeyvMongo('', {
namespace: 'bans',
namespace: CacheKeys.BANS,
ttl: math(process.env.BAN_DURATION, 7200000),
});
});

api/cache/getLogStores.js
@@ -6,6 +6,7 @@ const keyvRedis = require('./keyvRedis');
const keyvMongo = require('./keyvMongo');

const { BAN_DURATION, USE_REDIS } = process.env ?? {};
const THIRTY_MINUTES = 1800000;

const duration = math(BAN_DURATION, 7200000);

@@ -24,8 +25,8 @@ const config = isEnabled(USE_REDIS)
: new Keyv({ namespace: CacheKeys.CONFIG_STORE });

const tokenConfig = isEnabled(USE_REDIS) // ttl: 30 minutes
? new Keyv({ store: keyvRedis, ttl: 1800000 })
: new Keyv({ namespace: CacheKeys.TOKEN_CONFIG, ttl: 1800000 });
? new Keyv({ store: keyvRedis, ttl: THIRTY_MINUTES })
: new Keyv({ namespace: CacheKeys.TOKEN_CONFIG, ttl: THIRTY_MINUTES });

const genTitle = isEnabled(USE_REDIS) // ttl: 2 minutes
? new Keyv({ store: keyvRedis, ttl: 120000 })
@@ -42,7 +43,12 @@ const abortKeys = isEnabled(USE_REDIS)
const namespaces = {
[CacheKeys.CONFIG_STORE]: config,
pending_req,
ban: new Keyv({ store: keyvMongo, namespace: 'bans', ttl: duration }),
[ViolationTypes.BAN]: new Keyv({ store: keyvMongo, namespace: CacheKeys.BANS, ttl: duration }),
[CacheKeys.ENCODED_DOMAINS]: new Keyv({
store: keyvMongo,
namespace: CacheKeys.ENCODED_DOMAINS,
ttl: 0,
}),
general: new Keyv({ store: logFile, namespace: 'violations' }),
concurrent: createViolationInstance('concurrent'),
non_browser: createViolationInstance('non_browser'),
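Consumers fetch these namespaced Keyv stores by key through getLogStores; a short sketch of reading the ban store the way the middleware elsewhere in this changeset does (the user id is hypothetical):

const { ViolationTypes } = require('librechat-data-provider');
const getLogStores = require('./getLogStores');

async function isUserBanned(userId) {
  const banLogs = getLogStores(ViolationTypes.BAN); // Keyv backed by MongoDB
  const duration = banLogs.opts.ttl; // same TTL checkBan consults later in this diff
  return duration > 0 && Boolean(await banLogs.get(userId));
}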
@@ -1,9 +1,13 @@
const path = require('path');

module.exports = {
root: path.resolve(__dirname, '..', '..'),
uploads: path.resolve(__dirname, '..', '..', 'uploads'),
clientPath: path.resolve(__dirname, '..', '..', 'client'),
dist: path.resolve(__dirname, '..', '..', 'client', 'dist'),
publicPath: path.resolve(__dirname, '..', '..', 'client', 'public'),
fonts: path.resolve(__dirname, '..', '..', 'client', 'public', 'fonts'),
assets: path.resolve(__dirname, '..', '..', 'client', 'public', 'assets'),
imageOutput: path.resolve(__dirname, '..', '..', 'client', 'public', 'images'),
structuredTools: path.resolve(__dirname, '..', 'app', 'clients', 'tools', 'structured'),
pluginManifest: path.resolve(__dirname, '..', 'app', 'clients', 'tools', 'manifest.json'),
@@ -5,7 +5,15 @@ const { redactFormat, redactMessage, debugTraverse } = require('./parsers');

const logDir = path.join(__dirname, '..', 'logs');

const { NODE_ENV, DEBUG_LOGGING = true, DEBUG_CONSOLE = false } = process.env;
const { NODE_ENV, DEBUG_LOGGING = true, DEBUG_CONSOLE = false, CONSOLE_JSON = false } = process.env;

const useConsoleJson =
(typeof CONSOLE_JSON === 'string' && CONSOLE_JSON?.toLowerCase() === 'true') ||
CONSOLE_JSON === true;

const useDebugConsole =
(typeof DEBUG_CONSOLE === 'string' && DEBUG_CONSOLE?.toLowerCase() === 'true') ||
DEBUG_CONSOLE === true;

const levels = {
error: 0,
@@ -33,7 +41,7 @@ const level = () => {

const fileFormat = winston.format.combine(
redactFormat(),
winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
winston.format.timestamp({ format: () => new Date().toISOString() }),
winston.format.errors({ stack: true }),
winston.format.splat(),
// redactErrors(),
@@ -99,14 +107,20 @@ const consoleFormat = winston.format.combine(
}),
);

if (
(typeof DEBUG_CONSOLE === 'string' && DEBUG_CONSOLE?.toLowerCase() === 'true') ||
DEBUG_CONSOLE === true
) {
if (useDebugConsole) {
transports.push(
new winston.transports.Console({
level: 'debug',
format: winston.format.combine(fileFormat, debugTraverse),
format: useConsoleJson
? winston.format.combine(fileFormat, debugTraverse, winston.format.json())
: winston.format.combine(fileFormat, debugTraverse),
}),
);
} else if (useConsoleJson) {
transports.push(
new winston.transports.Console({
level: 'info',
format: winston.format.combine(fileFormat, winston.format.json()),
}),
);
} else {
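Stripped to its essentials, the new CONSOLE_JSON toggle above selects a JSON console transport for log collectors (GCP/AWS and similar) while leaving the pretty format for terminals. A minimal sketch, not the full config (which also layers redaction and debug traversal):

const winston = require('winston');

const useConsoleJson = String(process.env.CONSOLE_JSON).toLowerCase() === 'true';
const logger = winston.createLogger({
  transports: [
    new winston.transports.Console({
      level: 'info',
      format: useConsoleJson
        ? winston.format.combine(winston.format.timestamp(), winston.format.json())
        : winston.format.combine(winston.format.colorize(), winston.format.simple()),
    }),
  ],
});
logger.info('server started'); // JSON mode: {"level":"info","message":"server started",...}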
@@ -5,19 +5,18 @@ const Action = mongoose.model('action', actionSchema);

/**
* Update an action with new data without overwriting existing properties,
* or create a new action if it doesn't exist.
* or create a new action if it doesn't exist, within a transaction session if provided.
*
* @param {Object} searchParams - The search parameters to find the action to update.
* @param {string} searchParams.action_id - The ID of the action to update.
* @param {string} searchParams.user - The user ID of the action's author.
* @param {Object} updateData - An object containing the properties to update.
* @param {mongoose.ClientSession} [session] - The transaction session to use.
* @returns {Promise<Object>} The updated or newly created action document as a plain object.
*/
const updateAction = async (searchParams, updateData) => {
return await Action.findOneAndUpdate(searchParams, updateData, {
new: true,
upsert: true,
}).lean();
const updateAction = async (searchParams, updateData, session = null) => {
const options = { new: true, upsert: true, session };
return await Action.findOneAndUpdate(searchParams, updateData, options).lean();
};

/**
@@ -50,15 +49,17 @@ const getActions = async (searchParams, includeSensitive = false) => {
};

/**
* Deletes an action by its ID.
* Deletes an action by params, within a transaction session if provided.
*
* @param {Object} searchParams - The search parameters to find the action to update.
* @param {string} searchParams.action_id - The ID of the action to update.
* @param {Object} searchParams - The search parameters to find the action to delete.
* @param {string} searchParams.action_id - The ID of the action to delete.
* @param {string} searchParams.user - The user ID of the action's author.
* @param {mongoose.ClientSession} [session] - The transaction session to use (optional).
* @returns {Promise<Object>} A promise that resolves to the deleted action document as a plain object, or null if no document was found.
*/
const deleteAction = async (searchParams) => {
return await Action.findOneAndDelete(searchParams).lean();
const deleteAction = async (searchParams, session = null) => {
const options = session ? { session } : {};
return await Action.findOneAndDelete(searchParams, options).lean();
};

module.exports = {
@@ -5,19 +5,18 @@ const Assistant = mongoose.model('assistant', assistantSchema);

/**
* Update an assistant with new data without overwriting existing properties,
* or create a new assistant if it doesn't exist.
* or create a new assistant if it doesn't exist, within a transaction session if provided.
*
* @param {Object} searchParams - The search parameters to find the assistant to update.
* @param {string} searchParams.assistant_id - The ID of the assistant to update.
* @param {string} searchParams.user - The user ID of the assistant's author.
* @param {Object} updateData - An object containing the properties to update.
* @param {mongoose.ClientSession} [session] - The transaction session to use (optional).
* @returns {Promise<Object>} The updated or newly created assistant document as a plain object.
*/
const updateAssistant = async (searchParams, updateData) => {
return await Assistant.findOneAndUpdate(searchParams, updateData, {
new: true,
upsert: true,
}).lean();
const updateAssistant = async (searchParams, updateData, session = null) => {
const options = { new: true, upsert: true, session };
return await Assistant.findOneAndUpdate(searchParams, updateData, options).lean();
};

/**
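updateAction, deleteAction, and updateAssistant now all accept an optional mongoose session, so callers can group the writes into one transaction (the routes later in this diff wrap the pattern in a withSession helper). A sketch of the direct calling convention, assuming a MongoDB deployment that supports transactions; ids and payloads are illustrative:

const mongoose = require('mongoose');

async function updateBothAtomically({ assistant_id, action_id, user, actions, metadata }) {
  const session = await mongoose.startSession();
  session.startTransaction();
  try {
    // Both upserts commit or roll back together.
    await updateAssistant({ assistant_id, user }, { actions }, session);
    await updateAction({ action_id, user }, { metadata, assistant_id, user }, session);
    await session.commitTransaction();
  } catch (err) {
    await session.abortTransaction();
    throw err;
  } finally {
    session.endSession();
  }
}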
@@ -2,6 +2,7 @@ const mongoose = require('mongoose');
const { isEnabled } = require('../server/utils/handleText');
const transactionSchema = require('./schema/transaction');
const { getMultiplier } = require('./tx');
const { logger } = require('~/config');
const Balance = require('./Balance');
const cancelRate = 1.15;

@@ -11,7 +12,7 @@ transactionSchema.methods.calculateTokenValue = function () {
this.tokenValue = this.rawAmount;
}
const { valueKey, tokenType, model, endpointTokenConfig } = this;
const multiplier = getMultiplier({ valueKey, tokenType, model, endpointTokenConfig });
const multiplier = Math.abs(getMultiplier({ valueKey, tokenType, model, endpointTokenConfig }));
this.rate = multiplier;
this.tokenValue = this.rawAmount * multiplier;
if (this.context && this.tokenType === 'completion' && this.context === 'incomplete') {
@@ -35,18 +36,24 @@ transactionSchema.statics.create = async function (transactionData) {
return;
}

// Adjust the user's balance
const updatedBalance = await Balance.findOneAndUpdate(
let balance = await Balance.findOne({ user: transaction.user }).lean();
let incrementValue = transaction.tokenValue;

if (balance && balance?.tokenCredits + incrementValue < 0) {
incrementValue = -balance.tokenCredits;
}

balance = await Balance.findOneAndUpdate(
{ user: transaction.user },
{ $inc: { tokenCredits: transaction.tokenValue } },
{ $inc: { tokenCredits: incrementValue } },
{ upsert: true, new: true },
).lean();

return {
rate: transaction.rate,
user: transaction.user.toString(),
balance: updatedBalance.tokenCredits,
[transaction.tokenType]: transaction.tokenValue,
balance: balance.tokenCredits,
[transaction.tokenType]: incrementValue,
};
};

@@ -64,7 +71,7 @@ async function getTransactions(filter) {
try {
return await Transaction.find(filter).lean();
} catch (error) {
console.error('Error querying transactions:', error);
logger.error('Error querying transactions:', error);
throw error;
}
}
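The clamp added to Transaction.create above keeps a spend from driving tokenCredits below zero; the arithmetic in isolation (a sketch, with illustrative values):

function clampIncrement(currentCredits, tokenValue) {
  let incrementValue = tokenValue; // negative for spends
  if (currentCredits + incrementValue < 0) {
    incrementValue = -currentCredits; // drain the balance to exactly zero
  }
  return incrementValue;
}

clampIncrement(500, -800); // → -500: balance lands on 0 instead of -300
clampIncrement(500, -200); // → -200: unchanged, balance lands on 300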
@@ -15,7 +15,9 @@ const mongoose = require('mongoose');
* @property {'file'} object - Type of object, always 'file'
* @property {string} type - Type of file
* @property {number} usage - Number of uses of the file
* @property {string} [context] - Context of the file origin
* @property {boolean} [embedded] - Whether or not the file is embedded in vector db
* @property {string} [model] - The model to identify the group region of the file (for Azure OpenAI hosting)
* @property {string} [source] - The source of the file
* @property {number} [width] - Optional width of the file
* @property {number} [height] - Optional height of the file
@@ -82,6 +84,9 @@ const fileSchema = mongoose.Schema(
type: String,
default: FileSources.local,
},
model: {
type: String,
},
width: Number,
height: Number,
expiresAt: {
@@ -94,4 +99,6 @@ const fileSchema = mongoose.Schema(
},
);

fileSchema.index({ createdAt: 1, updatedAt: 1 });

module.exports = fileSchema;
@@ -54,7 +54,7 @@ const spendTokens = async (txData, tokenUsage) => {
prompt &&
completion &&
logger.debug('[spendTokens] Transaction data record against balance:', {
user: prompt.user,
user: txData.user,
prompt: prompt.prompt,
promptRate: prompt.rate,
completion: completion.completion,
@@ -3,6 +3,7 @@ const defaultRate = 6;

/**
* Mapping of model token sizes to their respective multipliers for prompt and completion.
* The rates are 1 USD per 1M tokens.
* @type {Object.<string, {prompt: number, completion: number}>}
*/
const tokenValues = {
@@ -19,6 +20,15 @@ const tokenValues = {
'claude-2.1': { prompt: 8, completion: 24 },
'claude-2': { prompt: 8, completion: 24 },
'claude-': { prompt: 0.8, completion: 2.4 },
'command-r-plus': { prompt: 3, completion: 15 },
'command-r': { prompt: 0.5, completion: 1.5 },
/* cohere doesn't have rates for the older command models,
so this was from https://artificialanalysis.ai/models/command-light/providers */
command: { prompt: 0.38, completion: 0.38 },
// 'gemini-1.5': { prompt: 7, completion: 21 }, // May 2nd, 2024 pricing
// 'gemini': { prompt: 0.5, completion: 1.5 }, // May 2nd, 2024 pricing
'gemini-1.5': { prompt: 0, completion: 0 }, // currently free
gemini: { prompt: 0, completion: 0 }, // currently free
};
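Since the rates above are USD per 1M tokens, converting a usage count to dollars is a single multiply per token type; for example, with the 'claude-2.1' entry (token counts illustrative):

const rate = { prompt: 8, completion: 24 }; // 'claude-2.1' from the table above
const usage = { prompt: 1500, completion: 400 };
const costUSD =
  (usage.prompt / 1e6) * rate.prompt + (usage.completion / 1e6) * rate.completion;
// (1500 / 1e6) * 8 + (400 / 1e6) * 24 = 0.012 + 0.0096 = $0.0216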
/**
@@ -42,6 +52,8 @@ const getValueKey = (model, endpoint) => {
return 'gpt-3.5-turbo-1106';
} else if (modelName.includes('gpt-3.5')) {
return '4k';
} else if (modelName.includes('gpt-4-vision')) {
return 'gpt-4-1106';
} else if (modelName.includes('gpt-4-1106')) {
return 'gpt-4-1106';
} else if (modelName.includes('gpt-4-0125')) {

@@ -34,6 +34,13 @@ describe('getValueKey', () => {
expect(getValueKey('openai/gpt-4-1106')).toBe('gpt-4-1106');
expect(getValueKey('gpt-4-1106/openai/')).toBe('gpt-4-1106');
});

it('should return "gpt-4-1106" for model type of "gpt-4-1106"', () => {
expect(getValueKey('gpt-4-vision-preview')).toBe('gpt-4-1106');
expect(getValueKey('openai/gpt-4-1106')).toBe('gpt-4-1106');
expect(getValueKey('gpt-4-turbo')).toBe('gpt-4-1106');
expect(getValueKey('gpt-4-0125')).toBe('gpt-4-1106');
});
});

describe('getMultiplier', () => {
@@ -1,13 +1,19 @@
{
"name": "@librechat/backend",
"version": "0.6.10",
"version": "0.7.1",
"description": "",
"scripts": {
"start": "echo 'please run this from the root directory'",
"server-dev": "echo 'please run this from the root directory'",
"test": "cross-env NODE_ENV=test jest",
"b:test": "NODE_ENV=test bun jest",
"test:ci": "jest --ci"
"test:ci": "jest --ci",
"add-balance": "node ./add-balance.js",
"list-balances": "node ./list-balances.js",
"user-stats": "node ./user-stats.js",
"create-user": "node ./create-user.js",
"ban-user": "node ./ban-user.js",
"delete-user": "node ./delete-user.js"
},
"repository": {
"type": "git",
@@ -29,14 +35,16 @@
"dependencies": {
"@anthropic-ai/sdk": "^0.16.1",
"@azure/search-documents": "^12.0.0",
"@google/generative-ai": "^0.5.0",
"@keyv/mongo": "^2.1.8",
"@keyv/redis": "^2.8.1",
"@langchain/community": "^0.0.17",
"@langchain/google-genai": "^0.0.8",
"@langchain/community": "^0.0.46",
"@langchain/google-genai": "^0.0.11",
"@langchain/google-vertexai": "^0.0.5",
"axios": "^1.3.4",
"bcryptjs": "^2.4.3",
"cheerio": "^1.0.0-rc.12",
"cohere-ai": "^6.0.0",
"cohere-ai": "^7.9.1",
"connect-redis": "^7.1.0",
"cookie": "^0.5.0",
"cors": "^2.8.5",
@@ -46,7 +54,7 @@
"express-rate-limit": "^6.9.0",
"express-session": "^1.17.3",
"file-type": "^18.7.0",
"firebase": "^10.8.0",
"firebase": "^10.6.0",
"googleapis": "^126.0.1",
"handlebars": "^4.7.7",
"html": "^1.0.0",
@@ -59,14 +67,14 @@
"langchain": "^0.0.214",
"librechat-data-provider": "*",
"lodash": "^4.17.21",
"meilisearch": "^0.37.0",
"meilisearch": "^0.38.0",
"mime": "^3.0.0",
"module-alias": "^2.2.3",
"mongoose": "^7.1.1",
"multer": "^1.4.5-lts.1",
"nodejs-gpt": "^1.37.4",
"nodemailer": "^6.9.4",
"openai": "^4.29.0",
"openai": "4.36.0",
"openai-chat-tokens": "^0.2.8",
"openid-client": "^5.4.2",
"passport": "^0.6.0",
@@ -1,5 +1,5 @@
const throttle = require('lodash/throttle');
const { getResponseSender, Constants } = require('librechat-data-provider');
const { getResponseSender, Constants, EModelEndpoint } = require('librechat-data-provider');
const { createAbortController, handleAbortError } = require('~/server/middleware');
const { sendMessage, createOnProgress } = require('~/server/utils');
const { saveMessage, getConvo } = require('~/models');
@@ -48,7 +48,7 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {

try {
const { client } = await initializeClient({ req, res, endpointOption });

const unfinished = endpointOption.endpoint === EModelEndpoint.google ? false : true;
const { onProgress: progressCallback, getPartialText } = createOnProgress({
onProgress: throttle(
({ text: partialText }) => {
@@ -59,7 +59,7 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
parentMessageId: overrideParentMessageId ?? userMessageId,
text: partialText,
model: client.modelOptions.model,
unfinished: true,
unfinished,
error: false,
user,
});
@@ -76,14 +76,14 @@ const refreshController = async (req, res) => {
}

try {
let payload;
payload = jwt.verify(refreshToken, process.env.JWT_REFRESH_SECRET);
const userId = payload.id;
const user = await User.findOne({ _id: userId });
const payload = jwt.verify(refreshToken, process.env.JWT_REFRESH_SECRET);
const user = await User.findOne({ _id: payload.id });
if (!user) {
return res.status(401).redirect('/login');
}

const userId = payload.id;

if (process.env.NODE_ENV === 'CI') {
const token = await setAuthTokens(userId, res);
const userObj = user.toJSON();
@@ -118,6 +118,6 @@ module.exports = {
getUserController,
refreshController,
registrationController,
resetPasswordRequestController,
resetPasswordController,
resetPasswordRequestController,
};
@@ -1,5 +1,5 @@
const throttle = require('lodash/throttle');
const { getResponseSender } = require('librechat-data-provider');
const { getResponseSender, EModelEndpoint } = require('librechat-data-provider');
const { createAbortController, handleAbortError } = require('~/server/middleware');
const { sendMessage, createOnProgress } = require('~/server/utils');
const { saveMessage, getConvo } = require('~/models');
@@ -48,6 +48,7 @@ const EditController = async (req, res, next, initializeClient) => {
}
};

const unfinished = endpointOption.endpoint === EModelEndpoint.google ? false : true;
const { onProgress: progressCallback, getPartialText } = createOnProgress({
generation,
onProgress: throttle(
@@ -59,7 +60,7 @@ const EditController = async (req, res, next, initializeClient) => {
parentMessageId: overrideParentMessageId ?? userMessageId,
text: partialText,
model: endpointOption.modelOptions.model,
unfinished: true,
unfinished,
isEdited: true,
error: false,
user,
@@ -6,6 +6,7 @@ const axios = require('axios');
const express = require('express');
const passport = require('passport');
const mongoSanitize = require('express-mongo-sanitize');
const validateImageRequest = require('./middleware/validateImageRequest');
const errorController = require('./controllers/ErrorController');
const { jwtLogin, passportLogin } = require('~/strategies');
const configureSocialLogins = require('./socialLogins');
@@ -43,7 +44,8 @@ const startServer = async () => {
app.use(mongoSanitize());
app.use(express.urlencoded({ extended: true, limit: '3mb' }));
app.use(express.static(app.locals.paths.dist));
app.use(express.static(app.locals.paths.publicPath));
app.use(express.static(app.locals.paths.fonts));
app.use(express.static(app.locals.paths.assets));
app.set('trust proxy', 1); // trust first proxy
app.use(cors());

@@ -82,6 +84,7 @@ const startServer = async () => {
app.use('/api/config', routes.config);
app.use('/api/assistants', routes.assistants);
app.use('/api/files', await routes.files.initialize());
app.use('/images/', validateImageRequest, routes.staticRoute);

app.use((req, res) => {
res.status(404).sendFile(path.join(app.locals.paths.dist, 'index.html'));
@@ -1,9 +1,9 @@
const { EModelEndpoint } = require('librechat-data-provider');
const { sendMessage, sendError, countTokens, isEnabled } = require('~/server/utils');
const { truncateText, smartTruncateText } = require('~/app/clients/prompts');
const { saveMessage, getConvo, getConvoTitle } = require('~/models');
const clearPendingReq = require('~/cache/clearPendingReq');
const abortControllers = require('./abortControllers');
const { redactMessage } = require('~/config/parsers');
const spendTokens = require('~/models/spendTokens');
const { abortRun } = require('./abortRun');
const { logger } = require('~/config');
@@ -100,7 +100,15 @@ const createAbortController = (req, res, getAbortData) => {
};

const handleAbortError = async (res, req, error, data) => {
logger.error('[handleAbortError] AI response error; aborting request:', error);
if (error?.message?.includes('base64')) {
logger.error('[handleAbortError] Error in base64 encoding', {
...error,
stack: smartTruncateText(error?.stack, 1000),
message: truncateText(error.message, 350),
});
} else {
logger.error('[handleAbortError] AI response error; aborting request:', error);
}
const { sender, conversationId, messageId, parentMessageId, partialText } = data;

if (error.stack && error.stack.includes('google')) {
@@ -109,13 +117,17 @@ const handleAbortError = async (res, req, error, data) => {
);
}

const errorText = error?.message?.includes('"type"')
? error.message
: 'An error occurred while processing your request. Please contact the Admin.';

const respondWithError = async (partialText) => {
let options = {
sender,
messageId,
conversationId,
parentMessageId,
text: redactMessage(error.message),
text: errorText,
shouldSaveMessage: true,
user: req.user.id,
};

@@ -75,7 +75,6 @@ async function abortRun(req, res) {
});

const finalEvent = {
title: 'New Chat',
final: true,
conversation,
runMessages,
@@ -1,14 +1,15 @@
const Keyv = require('keyv');
const uap = require('ua-parser-js');
const denyRequest = require('./denyRequest');
const { getLogStores } = require('../../cache');
const { ViolationTypes } = require('librechat-data-provider');
const { isEnabled, removePorts } = require('../utils');
const keyvRedis = require('../../cache/keyvRedis');
const User = require('../../models/User');
const keyvRedis = require('~/cache/keyvRedis');
const denyRequest = require('./denyRequest');
const { getLogStores } = require('~/cache');
const User = require('~/models/User');

const banCache = isEnabled(process.env.USE_REDIS)
? new Keyv({ store: keyvRedis })
: new Keyv({ namespace: 'bans', ttl: 0 });
: new Keyv({ namespace: ViolationTypes.BAN, ttl: 0 });
const message = 'Your account has been temporarily banned due to violations of our service.';

/**
@@ -28,7 +29,7 @@ const banResponse = async (req, res) => {
if (!ua.browser.name) {
return res.status(403).json({ message });
} else if (baseUrl === '/api/ask' || baseUrl === '/api/edit') {
return await denyRequest(req, res, { type: 'ban' });
return await denyRequest(req, res, { type: ViolationTypes.BAN });
}

return res.status(403).json({ message });
@@ -87,7 +88,7 @@ const checkBan = async (req, res, next = () => {}) => {
return await banResponse(req, res);
}

const banLogs = getLogStores('ban');
const banLogs = getLogStores(ViolationTypes.BAN);
const duration = banLogs.opts.ttl;

if (duration <= 0) {
@@ -14,6 +14,7 @@ const concurrentLimiter = require('./concurrentLimiter');
const validateMessageReq = require('./validateMessageReq');
const buildEndpointOption = require('./buildEndpointOption');
const validateRegistration = require('./validateRegistration');
const validateImageRequest = require('./validateImageRequest');
const moderateText = require('./moderateText');
const noIndex = require('./noIndex');

@@ -33,6 +34,7 @@ module.exports = {
validateMessageReq,
buildEndpointOption,
validateRegistration,
validateImageRequest,
validateModel,
moderateText,
noIndex,
@@ -1,5 +1,7 @@
const axios = require('axios');
const { ErrorTypes } = require('librechat-data-provider');
const denyRequest = require('./denyRequest');
const { logger } = require('~/config');

async function moderateText(req, res, next) {
if (process.env.OPENAI_MODERATION === 'true') {
@@ -23,12 +25,12 @@ async function moderateText(req, res, next) {
const flagged = results.some((result) => result.flagged);

if (flagged) {
const type = 'moderation';
const type = ErrorTypes.MODERATION;
const errorMessage = { type };
return await denyRequest(req, res, errorMessage);
}
} catch (error) {
console.error('Error in moderateText:', error);
logger.error('Error in moderateText:', error);
const errorMessage = 'error in moderation check';
return await denyRequest(req, res, errorMessage);
}
api/server/middleware/validateImageRequest.js
@@ -0,0 +1,42 @@
const cookies = require('cookie');
const jwt = require('jsonwebtoken');
const { logger } = require('~/config');

/**
* Middleware to validate image request.
* Must be set by `secureImageLinks` via custom config file.
*/
function validateImageRequest(req, res, next) {
if (!req.app.locals.secureImageLinks) {
return next();
}

const refreshToken = req.headers.cookie ? cookies.parse(req.headers.cookie).refreshToken : null;
if (!refreshToken) {
logger.warn('[validateImageRequest] Refresh token not provided');
return res.status(401).send('Unauthorized');
}

let payload;
try {
payload = jwt.verify(refreshToken, process.env.JWT_REFRESH_SECRET);
} catch (err) {
logger.warn('[validateImageRequest]', err);
return res.status(403).send('Access Denied');
}

const currentTimeInSeconds = Math.floor(Date.now() / 1000);
if (payload.exp < currentTimeInSeconds) {
logger.warn('[validateImageRequest] Refresh token expired');
return res.status(403).send('Access Denied');
}

if (req.path.includes(payload.id)) {
logger.debug('[validateImageRequest] Image request validated');
next();
} else {
res.status(403).send('Access Denied');
}
}

module.exports = validateImageRequest;
@@ -1,10 +1,11 @@
const { v4 } = require('uuid');
const express = require('express');
const { actionDelimiter } = require('librechat-data-provider');
const { initializeClient } = require('~/server/services/Endpoints/assistants');
const { encryptMetadata, domainParser } = require('~/server/services/ActionService');
const { actionDelimiter, EModelEndpoint } = require('librechat-data-provider');
const { initializeClient } = require('~/server/services/Endpoints/assistants');
const { updateAction, getActions, deleteAction } = require('~/models/Action');
const { updateAssistant, getAssistant } = require('~/models/Assistant');
const { withSession } = require('~/server/utils');
const { logger } = require('~/config');

const router = express.Router();
@@ -46,7 +47,7 @@ router.post('/:assistant_id', async (req, res) => {

let { domain } = metadata;
/* Azure doesn't support periods in function names */
domain = domainParser(req, domain, true);
domain = await domainParser(req, domain, true);

if (!domain) {
return res.status(400).json({ message: 'No domain provided' });
@@ -110,7 +111,8 @@ router.post('/:assistant_id', async (req, res) => {

const promises = [];
promises.push(
updateAssistant(
withSession(
updateAssistant,
{ assistant_id },
{
actions,
@@ -119,7 +121,9 @@ router.post('/:assistant_id', async (req, res) => {
),
);
promises.push(openai.beta.assistants.update(assistant_id, { tools }));
promises.push(updateAction({ action_id }, { metadata, assistant_id, user: req.user.id }));
promises.push(
withSession(updateAction, { action_id }, { metadata, assistant_id, user: req.user.id }),
);

/** @type {[AssistantDocument, Assistant, Action]} */
const resolved = await Promise.all(promises);
@@ -129,6 +133,15 @@ router.post('/:assistant_id', async (req, res) => {
delete resolved[2].metadata[field];
}
}

/* Map Azure OpenAI model to the assistant as defined by config */
if (req.app.locals[EModelEndpoint.azureOpenAI]?.assistants) {
resolved[1] = {
...resolved[1],
model: req.body.model,
};
}

res.json(resolved);
} catch (error) {
const message = 'Trouble updating the Assistant Action';
@@ -171,7 +184,7 @@ router.delete('/:assistant_id/:action_id/:model', async (req, res) => {
return true;
});

domain = domainParser(req, domain, true);
domain = await domainParser(req, domain, true);

const updatedTools = tools.filter(
(tool) => !(tool.function && tool.function.name.includes(domain)),
@@ -179,7 +192,8 @@ router.delete('/:assistant_id/:action_id/:model', async (req, res) => {

const promises = [];
promises.push(
updateAssistant(
withSession(
updateAssistant,
{ assistant_id },
{
actions: updatedActions,
@@ -188,7 +202,7 @@ router.delete('/:assistant_id/:action_id/:model', async (req, res) => {
),
);
promises.push(openai.beta.assistants.update(assistant_id, { tools: updatedTools }));
promises.push(deleteAction({ action_id }));
promises.push(withSession(deleteAction, { action_id }));

await Promise.all(promises);
res.status(200).json({ message: 'Action deleted successfully' });
@@ -213,7 +213,13 @@ router.post('/avatar/:assistant_id', upload.single('file'), async (req, res) =>
/** @type {{ openai: OpenAI }} */
const { openai } = await initializeClient({ req, res });

const image = await uploadImageBuffer({ req, context: FileContext.avatar });
const image = await uploadImageBuffer({
req,
context: FileContext.avatar,
metadata: {
buffer: req.file.buffer,
},
});

try {
_metadata = JSON.parse(_metadata);
@@ -247,7 +247,6 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
}

finalEvent = {
title: 'New Chat',
final: true,
conversation: await getConvo(req.user.id, conversationId),
runMessages,
@@ -477,7 +476,6 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res

conversation = {
conversationId,
title: 'New Chat',
endpoint: EModelEndpoint.assistants,
promptPrefix: promptPrefix,
instructions: instructions,
@@ -597,7 +595,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res

/** @type {ResponseMessage} */
const responseMessage = {
...response.finalMessage,
...(response.responseMessage ?? response.finalMessage),
parentMessageId: userMessageId,
conversationId,
user: req.user.id,
@@ -607,7 +605,6 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
};

sendMessage(res, {
title: 'New Chat',
final: true,
conversation,
requestMessage: {
@@ -18,13 +18,15 @@ router.post('/', upload.single('input'), async (req, res) => {
}

const fileStrategy = req.app.locals.fileStrategy;
const webPBuffer = await resizeAvatar({
const desiredFormat = req.app.locals.imageOutputType;
const resizedBuffer = await resizeAvatar({
userId,
input,
desiredFormat,
});

const { processAvatar } = getStrategyFunctions(fileStrategy);
const url = await processAvatar({ buffer: webPBuffer, userId, manual });
const url = await processAvatar({ buffer: resizedBuffer, userId, manual });

res.json({ url });
} catch (error) {
@@ -1,12 +1,13 @@
const axios = require('axios');
const fs = require('fs').promises;
const express = require('express');
const { isUUID } = require('librechat-data-provider');
const { isUUID, FileSources } = require('librechat-data-provider');
const {
filterFile,
processFileUpload,
processDeleteRequest,
} = require('~/server/services/Files/process');
const { initializeClient } = require('~/server/services/Endpoints/assistants');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { getFiles } = require('~/models/File');
const { logger } = require('~/config');

@@ -65,28 +66,65 @@ router.delete('/', async (req, res) => {
}
});

router.get('/download/:fileId', async (req, res) => {
router.get('/download/:userId/:file_id', async (req, res) => {
try {
const { fileId } = req.params;
const { userId, file_id } = req.params;
logger.debug(`File download requested by user ${userId}: ${file_id}`);

const options = {
headers: {
// TODO: Client initialization for OpenAI API Authentication
Authorization: `Bearer ${process.env.OPENAI_API_KEY}`,
},
responseType: 'stream',
if (userId !== req.user.id) {
logger.warn(`${errorPrefix} forbidden: ${file_id}`);
return res.status(403).send('Forbidden');
}

const [file] = await getFiles({ file_id });
const errorPrefix = `File download requested by user ${userId}`;

if (!file) {
logger.warn(`${errorPrefix} not found: ${file_id}`);
return res.status(404).send('File not found');
}

if (!file.filepath.includes(userId)) {
logger.warn(`${errorPrefix} forbidden: ${file_id}`);
return res.status(403).send('Forbidden');
}

if (file.source === FileSources.openai && !file.model) {
logger.warn(`${errorPrefix} has no associated model: ${file_id}`);
return res.status(400).send('The model used when creating this file is not available');
}

const { getDownloadStream } = getStrategyFunctions(file.source);
if (!getDownloadStream) {
logger.warn(`${errorPrefix} has no stream method implemented: ${file.source}`);
return res.status(501).send('Not Implemented');
}

const setHeaders = () => {
res.setHeader('Content-Disposition', `attachment; filename="${file.filename}"`);
res.setHeader('Content-Type', 'application/octet-stream');
res.setHeader('X-File-Metadata', JSON.stringify(file));
};

const fileResponse = await axios.get(`https://api.openai.com/v1/files/${fileId}`, {
headers: options.headers,
});
const { filename } = fileResponse.data;

const response = await axios.get(`https://api.openai.com/v1/files/${fileId}/content`, options);
res.setHeader('Content-Disposition', `attachment; filename="${filename}"`);
response.data.pipe(res);
/** @type {{ body: import('stream').PassThrough } | undefined} */
let passThrough;
/** @type {ReadableStream | undefined} */
let fileStream;
if (file.source === FileSources.openai) {
req.body = { model: file.model };
const { openai } = await initializeClient({ req, res });
logger.debug(`Downloading file ${file_id} from OpenAI`);
passThrough = await getDownloadStream(file_id, openai);
setHeaders();
logger.debug(`File ${file_id} downloaded from OpenAI`);
passThrough.body.pipe(res);
} else {
fileStream = getDownloadStream(file_id);
setHeaders();
fileStream.pipe(res);
}
} catch (error) {
console.error('Error downloading file:', error);
logger.error('Error downloading file:', error);
res.status(500).send('Error downloading file');
}
});
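With the route reshaped above, downloads are user-scoped: the path carries both ids and the handler rejects mismatches with 403. An illustrative client call (a sketch: the /api/files mount point appears in the server setup earlier in this diff, while the base URL, ids, and cookie handling are assumed):

const axios = require('axios');
const fs = require('fs');

async function downloadFile(baseUrl, userId, file_id, cookie) {
  const res = await axios.get(`${baseUrl}/api/files/download/${userId}/${file_id}`, {
    headers: { Cookie: cookie }, // session must resolve to req.user.id === userId
    responseType: 'stream',
  });
  res.data.pipe(fs.createWriteStream('download.bin'));
}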
@@ -15,6 +15,7 @@ const storage = multer.diskStorage({
},
filename: function (req, file, cb) {
req.file_id = crypto.randomUUID();
file.originalname = decodeURIComponent(file.originalname);
cb(null, `${file.originalname}`);
},
});

@@ -17,6 +17,7 @@ const user = require('./user');
const config = require('./config');
const assistants = require('./assistants');
const files = require('./files');
const staticRoute = require('./static');

module.exports = {
search,
@@ -38,4 +39,5 @@ module.exports = {
config,
assistants,
files,
staticRoute,
};
api/server/routes/static.js
@@ -0,0 +1,7 @@
const express = require('express');
const paths = require('~/config/paths');

const router = express.Router();
router.use(express.static(paths.imageOutput));

module.exports = router;
@@ -1,20 +1,27 @@
const { AuthTypeEnum, EModelEndpoint, actionDomainSeparator } = require('librechat-data-provider');
const {
AuthTypeEnum,
EModelEndpoint,
actionDomainSeparator,
CacheKeys,
Constants,
} = require('librechat-data-provider');
const { encryptV2, decryptV2 } = require('~/server/utils/crypto');
const { getActions } = require('~/models/Action');
const { getLogStores } = require('~/cache');
const { logger } = require('~/config');

/**
* Parses the domain for an action.
* Encodes or decodes a domain name to/from base64, or replacing periods with a custom separator.
*
* Azure OpenAI Assistants API doesn't support periods in function
* names due to `[a-zA-Z0-9_-]*` Regex Validation.
* Necessary because Azure OpenAI Assistants API doesn't support periods in function
* names due to `[a-zA-Z0-9_-]*` Regex Validation, limited to a 64-character maximum.
*
* @param {Express.Request} req - Express Request object
* @param {string} domain - The domain for the actoin
* @param {boolean} inverse - If true, replaces periods with `actionDomainSeparator`
* @returns {string} The parsed domain
* @param {Express.Request} req - The Express Request object.
* @param {string} domain - The domain name to encode/decode.
* @param {boolean} inverse - False to decode from base64, true to encode to base64.
* @returns {Promise<string>} Encoded or decoded domain string.
*/
function domainParser(req, domain, inverse = false) {
async function domainParser(req, domain, inverse = false) {
if (!domain) {
return;
}
@@ -23,11 +30,35 @@ function domainParser(req, domain, inverse = false) {
return domain;
}

if (inverse) {
const domainsCache = getLogStores(CacheKeys.ENCODED_DOMAINS);
const cachedDomain = await domainsCache.get(domain);
if (inverse && cachedDomain) {
return domain;
}

if (inverse && domain.length <= Constants.ENCODED_DOMAIN_LENGTH) {
return domain.replace(/\./g, actionDomainSeparator);
}

return domain.replace(actionDomainSeparator, '.');
if (inverse) {
const modifiedDomain = Buffer.from(domain).toString('base64');
const key = modifiedDomain.substring(0, Constants.ENCODED_DOMAIN_LENGTH);
await domainsCache.set(key, modifiedDomain);
return key;
}

const replaceSeparatorRegex = new RegExp(actionDomainSeparator, 'g');

if (!cachedDomain) {
return domain.replace(replaceSeparatorRegex, '.');
}

try {
return Buffer.from(cachedDomain, 'base64').toString('utf-8');
} catch (error) {
logger.error(`Failed to parse domain (possibly not base64): ${domain}`, error);
return domain;
}
}

/**
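In short, the rewrite trades the old separator substitution for a cached base64 key once a domain exceeds Constants.ENCODED_DOMAIN_LENGTH; the new spec file below exercises this in detail, but the round trip reduces to this sketch (domain value illustrative):

async function roundTrip(req) {
  const domain = 'a-very-long-subdomain.example.com'; // illustrative
  const key = await domainParser(req, domain, true); // encode: short key, cached
  return await domainParser(req, key, false); // decode: cache hit restores the original
}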
api/server/services/ActionService.spec.js
@@ -0,0 +1,196 @@
const { Constants, EModelEndpoint, actionDomainSeparator } = require('librechat-data-provider');
const { domainParser } = require('./ActionService');

jest.mock('keyv');

const globalCache = {};
jest.mock('~/cache/getLogStores', () => {
return jest.fn().mockImplementation(() => {
const EventEmitter = require('events');
const { CacheKeys } = require('librechat-data-provider');

class KeyvMongo extends EventEmitter {
constructor(url = 'mongodb://127.0.0.1:27017', options) {
super();
this.ttlSupport = false;
url = url ?? {};
if (typeof url === 'string') {
url = { url };
}
if (url.uri) {
url = { url: url.uri, ...url };
}
this.opts = {
url,
collection: 'keyv',
...url,
...options,
};
}

get = async (key) => {
return new Promise((resolve) => {
resolve(globalCache[key] || null);
});
};

set = async (key, value) => {
return new Promise((resolve) => {
globalCache[key] = value;
resolve(true);
});
};
}

return new KeyvMongo('', {
namespace: CacheKeys.ENCODED_DOMAINS,
ttl: 0,
});
});
});

describe('domainParser', () => {
const req = {
app: {
locals: {
[EModelEndpoint.azureOpenAI]: {
assistants: true,
},
},
},
};

const reqNoAzure = {
app: {
locals: {
[EModelEndpoint.azureOpenAI]: {
assistants: false,
},
},
},
};

const TLD = '.com';

// Non-azure request
it('returns domain as is if not azure', async () => {
const domain = `example.com${actionDomainSeparator}test${actionDomainSeparator}`;
const result1 = await domainParser(reqNoAzure, domain, false);
const result2 = await domainParser(reqNoAzure, domain, true);
expect(result1).toEqual(domain);
expect(result2).toEqual(domain);
});

// Test for Empty or Null Inputs
it('returns undefined for null domain input', async () => {
const result = await domainParser(req, null, true);
expect(result).toBeUndefined();
});

it('returns undefined for empty domain input', async () => {
const result = await domainParser(req, '', true);
expect(result).toBeUndefined();
});

// Verify Correct Caching Behavior
it('caches encoded domain correctly', async () => {
const domain = 'longdomainname.com';
const encodedDomain = Buffer.from(domain)
.toString('base64')
.substring(0, Constants.ENCODED_DOMAIN_LENGTH);

await domainParser(req, domain, true);

const cachedValue = await globalCache[encodedDomain];
expect(cachedValue).toEqual(Buffer.from(domain).toString('base64'));
});

// Test for Edge Cases Around Length Threshold
it('encodes domain exactly at threshold without modification', async () => {
const domain = 'a'.repeat(Constants.ENCODED_DOMAIN_LENGTH - TLD.length) + TLD;
const expected = domain.replace(/\./g, actionDomainSeparator);
const result = await domainParser(req, domain, true);
expect(result).toEqual(expected);
});

it('encodes domain just below threshold without modification', async () => {
const domain = 'a'.repeat(Constants.ENCODED_DOMAIN_LENGTH - 1 - TLD.length) + TLD;
const expected = domain.replace(/\./g, actionDomainSeparator);
const result = await domainParser(req, domain, true);
expect(result).toEqual(expected);
});

// Test for Unicode Domain Names
it('handles unicode characters in domain names correctly when encoding', async () => {
const unicodeDomain = 'täst.example.com';
const encodedDomain = Buffer.from(unicodeDomain)
.toString('base64')
.substring(0, Constants.ENCODED_DOMAIN_LENGTH);
const result = await domainParser(req, unicodeDomain, true);
expect(result).toEqual(encodedDomain);
});

it('decodes unicode domain names correctly', async () => {
const unicodeDomain = 'täst.example.com';
const encodedDomain = Buffer.from(unicodeDomain).toString('base64');
globalCache[encodedDomain.substring(0, Constants.ENCODED_DOMAIN_LENGTH)] = encodedDomain; // Simulate caching

const result = await domainParser(
req,
encodedDomain.substring(0, Constants.ENCODED_DOMAIN_LENGTH),
false,
);
expect(result).toEqual(unicodeDomain);
});

// Core Functionality Tests
it('returns domain with replaced separators if no cached domain exists', async () => {
const domain = 'example.com';
const withSeparator = domain.replace(/\./g, actionDomainSeparator);
const result = await domainParser(req, withSeparator, false);
expect(result).toEqual(domain);
});

it('returns domain with replaced separators when inverse is false and under encoding length', async () => {
const domain = 'examp.com';
const withSeparator = domain.replace(/\./g, actionDomainSeparator);
const result = await domainParser(req, withSeparator, false);
expect(result).toEqual(domain);
});

it('replaces periods with actionDomainSeparator when inverse is true and under encoding length', async () => {
const domain = 'examp.com';
const expected = domain.replace(/\./g, actionDomainSeparator);
const result = await domainParser(req, domain, true);
expect(result).toEqual(expected);
});

it('encodes domain when length is above threshold and inverse is true', async () => {
const domain = 'a'.repeat(Constants.ENCODED_DOMAIN_LENGTH + 1).concat('.com');
const result = await domainParser(req, domain, true);
expect(result).not.toEqual(domain);
expect(result.length).toBeLessThanOrEqual(Constants.ENCODED_DOMAIN_LENGTH);
});

it('returns encoded value if no encoded value is cached, and inverse is false', async () => {
const originalDomain = 'example.com';
const encodedDomain = Buffer.from(
originalDomain.replace(/\./g, actionDomainSeparator),
).toString('base64');
const result = await domainParser(req, encodedDomain, false);
expect(result).toEqual(encodedDomain);
});

it('decodes encoded value if cached and encoded value is provided, and inverse is false', async () => {
const originalDomain = 'example.com';
const encodedDomain = await domainParser(req, originalDomain, true);
const result = await domainParser(req, encodedDomain, false);
expect(result).toEqual(originalDomain);
});

it('handles invalid base64 encoded values gracefully', async () => {
const invalidBase64Domain = 'not_base64_encoded';
const result = await domainParser(req, invalidBase64Domain, false);
expect(result).toEqual(invalidBase64Domain);
});
});
@@ -1,21 +1,17 @@
|
||||
const {
|
||||
Constants,
|
||||
FileSources,
|
||||
Capabilities,
|
||||
EModelEndpoint,
|
||||
EImageOutputType,
|
||||
defaultSocialLogins,
|
||||
validateAzureGroups,
|
||||
  mapModelToAzureConfig,
  assistantEndpointSchema,
  deprecatedAzureVariables,
  conflictingAzureVariables,
} = require('librechat-data-provider');
const { checkVariables, checkHealth, checkConfig, checkAzureVariables } = require('./start/checks');
const { azureAssistantsDefaults, assistantsConfigSetup } = require('./start/assistants');
const { initializeFirebase } = require('./Files/Firebase/initialize');
const loadCustomConfig = require('./Config/loadCustomConfig');
const handleRateLimits = require('./Config/handleRateLimits');
const { azureConfigSetup } = require('./start/azureOpenAI');
const { loadAndFormatTools } = require('./ToolService');
const paths = require('~/config/paths');
const { logger } = require('~/config');

/**
 *
@@ -28,8 +24,12 @@ const AppService = async (app) => {
  const config = (await loadCustomConfig()) ?? {};

  const fileStrategy = config.fileStrategy ?? FileSources.local;
  const imageOutputType = config?.imageOutputType ?? EImageOutputType.PNG;
  process.env.CDN_PROVIDER = fileStrategy;

  checkVariables();
  await checkHealth();

  if (fileStrategy === FileSources.firebase) {
    initializeFirebase();
  }
@@ -50,119 +50,46 @@ const AppService = async (app) => {

  if (!Object.keys(config).length) {
    app.locals = {
      availableTools,
      paths,
      fileStrategy,
      socialLogins,
      paths,
      availableTools,
      imageOutputType,
    };

    return;
  }

  if (config.version !== Constants.CONFIG_VERSION) {
    logger.info(
      `\nOutdated Config version: ${config.version}. Current version: ${Constants.CONFIG_VERSION}\n\nCheck out the latest config file guide for new options and features.\nhttps://docs.librechat.ai/install/configuration/custom_config.html\n\n`,
    );
  }

  checkConfig(config);
  handleRateLimits(config?.rateLimits);

  const endpointLocals = {};

  if (config?.endpoints?.[EModelEndpoint.azureOpenAI]) {
    const { groups, ...azureConfiguration } = config.endpoints[EModelEndpoint.azureOpenAI];
    const { isValid, modelNames, modelGroupMap, groupMap, errors } = validateAzureGroups(groups);
    endpointLocals[EModelEndpoint.azureOpenAI] = azureConfigSetup(config);
    checkAzureVariables();
  }

    if (!isValid) {
      const errorString = errors.join('\n');
      const errorMessage = 'Invalid Azure OpenAI configuration:\n' + errorString;
      logger.error(errorMessage);
      throw new Error(errorMessage);
    }

    const assistantModels = [];
    const assistantGroups = new Set();
    for (const modelName of modelNames) {
      mapModelToAzureConfig({ modelName, modelGroupMap, groupMap });
      const groupName = modelGroupMap?.[modelName]?.group;
      const modelGroup = groupMap?.[groupName];
      let supportsAssistants = modelGroup?.assistants || modelGroup?.[modelName]?.assistants;
      if (supportsAssistants) {
        assistantModels.push(modelName);
        !assistantGroups.has(groupName) && assistantGroups.add(groupName);
      }
    }

    if (azureConfiguration.assistants && assistantModels.length === 0) {
      throw new Error(
        'No Azure models are configured to support assistants. Please remove the `assistants` field or configure at least one model to support assistants.',
      );
    }

    endpointLocals[EModelEndpoint.azureOpenAI] = {
      modelNames,
      modelGroupMap,
      groupMap,
      assistantModels,
      assistantGroups: Array.from(assistantGroups),
      ...azureConfiguration,
    };

    deprecatedAzureVariables.forEach(({ key, description }) => {
      if (process.env[key]) {
        logger.warn(
          `The \`${key}\` environment variable (related to ${description}) should not be used in combination with the \`azureOpenAI\` endpoint configuration, as you will experience conflicts and errors.`,
        );
      }
    });

    conflictingAzureVariables.forEach(({ key }) => {
      if (process.env[key]) {
        logger.warn(
          `The \`${key}\` environment variable should not be used in combination with the \`azureOpenAI\` endpoint configuration, as you may experience conflicts with the defined placeholders for mapping to the current model grouping using the same name.`,
        );
      }
    });

    if (azureConfiguration.assistants) {
      endpointLocals[EModelEndpoint.assistants] = {
        // Note: may need to add retrieval models here in the future
        capabilities: [Capabilities.tools, Capabilities.actions, Capabilities.code_interpreter],
      };
    }
  if (config?.endpoints?.[EModelEndpoint.azureOpenAI]?.assistants) {
    endpointLocals[EModelEndpoint.assistants] = azureAssistantsDefaults();
  }

  if (config?.endpoints?.[EModelEndpoint.assistants]) {
    const assistantsConfig = config.endpoints[EModelEndpoint.assistants];
    const parsedConfig = assistantEndpointSchema.parse(assistantsConfig);
    if (assistantsConfig.supportedIds?.length && assistantsConfig.excludedIds?.length) {
      logger.warn(
        `Both \`supportedIds\` and \`excludedIds\` are defined for the ${EModelEndpoint.assistants} endpoint; \`excludedIds\` field will be ignored.`,
      );
    }

    const prevConfig = endpointLocals[EModelEndpoint.assistants] ?? {};

    /** @type {Partial<TAssistantEndpoint>} */
    endpointLocals[EModelEndpoint.assistants] = {
      ...prevConfig,
      retrievalModels: parsedConfig.retrievalModels,
      disableBuilder: parsedConfig.disableBuilder,
      pollIntervalMs: parsedConfig.pollIntervalMs,
      supportedIds: parsedConfig.supportedIds,
      capabilities: parsedConfig.capabilities,
      excludedIds: parsedConfig.excludedIds,
      timeoutMs: parsedConfig.timeoutMs,
    };
    endpointLocals[EModelEndpoint.assistants] = assistantsConfigSetup(
      config,
      endpointLocals[EModelEndpoint.assistants],
    );
  }

  app.locals = {
    socialLogins,
    availableTools,
    fileStrategy,
    fileConfig: config?.fileConfig,
    interface: config?.interface,
    paths,
    socialLogins,
    fileStrategy,
    availableTools,
    imageOutputType,
    interface: config?.interface,
    fileConfig: config?.fileConfig,
    secureImageLinks: config?.secureImageLinks,
    ...endpointLocals,
  };
};
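The config bootstrap above resolves every optional value through a nullish-coalescing fallback chain, so a missing or partial custom config still yields concrete `app.locals` values. A minimal stand-alone sketch of that chain (the `resolveDefaults` helper is illustrative, not part of the codebase):

    const { FileSources, EImageOutputType } = require('librechat-data-provider');

    // Hypothetical helper mirroring the fallback chain in AppService above.
    function resolveDefaults(config = {}) {
      return {
        fileStrategy: config.fileStrategy ?? FileSources.local,
        imageOutputType: config?.imageOutputType ?? EImageOutputType.PNG,
      };
    }

    resolveDefaults(); // falls back to local storage and PNG output
    resolveDefaults({ imageOutputType: EImageOutputType.WEBP }); // overrides only the image type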
@@ -1,6 +1,7 @@
const {
  FileSources,
  EModelEndpoint,
  EImageOutputType,
  defaultSocialLogins,
  validateAzureGroups,
  deprecatedAzureVariables,
@@ -107,6 +108,10 @@ describe('AppService', () => {
        },
      },
      paths: expect.anything(),
      imageOutputType: expect.any(String),
      interface: undefined,
      fileConfig: undefined,
      secureImageLinks: undefined,
    });
  });

@@ -125,6 +130,36 @@ describe('AppService', () => {
    expect(logger.info).toHaveBeenCalledWith(expect.stringContaining('Outdated Config version'));
  });

  it('should change the `imageOutputType` based on config value', async () => {
    require('./Config/loadCustomConfig').mockImplementationOnce(() =>
      Promise.resolve({
        version: '0.10.0',
        imageOutputType: EImageOutputType.WEBP,
      }),
    );

    await AppService(app);
    expect(app.locals.imageOutputType).toEqual(EImageOutputType.WEBP);
  });

  it('should default to `PNG` `imageOutputType` with no provided type', async () => {
    require('./Config/loadCustomConfig').mockImplementationOnce(() =>
      Promise.resolve({
        version: '0.10.0',
      }),
    );

    await AppService(app);
    expect(app.locals.imageOutputType).toEqual(EImageOutputType.PNG);
  });

  it('should default to `PNG` `imageOutputType` with no provided config', async () => {
    require('./Config/loadCustomConfig').mockImplementationOnce(() => Promise.resolve(undefined));

    await AppService(app);
    expect(app.locals.imageOutputType).toEqual(EImageOutputType.PNG);
  });

  it('should initialize Firebase when fileStrategy is firebase', async () => {
    require('./Config/loadCustomConfig').mockImplementationOnce(() =>
      Promise.resolve({
@@ -193,6 +228,27 @@ describe('AppService', () => {
    );
  });

  it('should correctly configure minimum Azure OpenAI Assistant values', async () => {
    const assistantGroups = [azureGroups[0], { ...azureGroups[1], assistants: true }];
    require('./Config/loadCustomConfig').mockImplementationOnce(() =>
      Promise.resolve({
        endpoints: {
          [EModelEndpoint.azureOpenAI]: {
            groups: assistantGroups,
            assistants: true,
          },
        },
      }),
    );

    process.env.WESTUS_API_KEY = 'westus-key';
    process.env.EASTUS_API_KEY = 'eastus-key';

    await AppService(app);
    expect(app.locals).toHaveProperty(EModelEndpoint.assistants);
    expect(app.locals[EModelEndpoint.assistants].capabilities.length).toEqual(3);
  });

  it('should correctly configure Azure OpenAI endpoint based on custom config', async () => {
    require('./Config/loadCustomConfig').mockImplementationOnce(() =>
      Promise.resolve({
@@ -1,4 +1,3 @@
const path = require('path');
const { klona } = require('klona');
const {
  StepTypes,
@@ -233,14 +232,9 @@ function createInProgressHandler(openai, thread_id, messages) {
          file_id,
          basename: `${file_id}.png`,
        });
        // toolCall.asset_pointer = file.filepath;
        const prelimImage = {
          file_id,
          filename: path.basename(file.filepath),
          filepath: file.filepath,
          height: file.height,
          width: file.width,
        };

        const prelimImage = file;

        // check if every key has a value before adding to content
        const prelimImageKeys = Object.keys(prelimImage);
        const validImageFile = prelimImageKeys.every((key) => prelimImage[key]);
@@ -46,12 +46,23 @@ async function loadConfigModels(req) {
      (endpoint.models.fetch || endpoint.models.default),
  );

  const fetchPromisesMap = {}; // Map for promises keyed by unique combination of baseURL and apiKey
  const uniqueKeyToNameMap = {}; // Map to associate unique keys with endpoint names
  /**
   * @type {Record<string, Promise<string[]>>}
   * Map for promises keyed by unique combination of baseURL and apiKey */
  const fetchPromisesMap = {};
  /**
   * @type {Record<string, string[]>}
   * Map to associate unique keys with endpoint names; note: one key may correspond to multiple endpoints */
  const uniqueKeyToEndpointsMap = {};
  /**
   * @type {Record<string, Partial<TEndpoint>>}
   * Map to associate endpoint names to their configurations */
  const endpointsMap = {};

  for (let i = 0; i < customEndpoints.length; i++) {
    const endpoint = customEndpoints[i];
    const { models, name, baseURL, apiKey } = endpoint;
    endpointsMap[name] = endpoint;

    const API_KEY = extractEnvVariable(apiKey);
    const BASE_URL = extractEnvVariable(baseURL);
@@ -70,8 +81,8 @@ async function loadConfigModels(req) {
        name,
        userIdQuery: models.userIdQuery,
      });
      uniqueKeyToNameMap[uniqueKey] = uniqueKeyToNameMap[uniqueKey] || [];
      uniqueKeyToNameMap[uniqueKey].push(name);
      uniqueKeyToEndpointsMap[uniqueKey] = uniqueKeyToEndpointsMap[uniqueKey] || [];
      uniqueKeyToEndpointsMap[uniqueKey].push(name);
      continue;
    }

@@ -86,10 +97,11 @@ async function loadConfigModels(req) {
  for (let i = 0; i < fetchedData.length; i++) {
    const currentKey = uniqueKeys[i];
    const modelData = fetchedData[i];
    const associatedNames = uniqueKeyToNameMap[currentKey];
    const associatedNames = uniqueKeyToEndpointsMap[currentKey];

    for (const name of associatedNames) {
      modelsConfig[name] = modelData;
      const endpoint = endpointsMap[name];
      modelsConfig[name] = !modelData?.length ? endpoint.models.default ?? [] : modelData;
    }
  }
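The rewritten `loadConfigModels` keys each fetch on its baseURL/apiKey pair, fans the result out to every endpoint sharing that pair, and falls back to the endpoint's `models.default` when the fetch returns nothing. A condensed sketch of the same idea (the key format and helper names are illustrative, not the project's actual ones):

    // Illustrative: dedupe model fetches by credential pair, with a per-endpoint default fallback.
    async function loadModels(endpoints, fetchModels) {
      const fetchPromises = {}; // one in-flight fetch per baseURL/apiKey pair
      const namesByKey = {}; // endpoint names grouped by that pair
      const endpointsByName = {};
      for (const endpoint of endpoints) {
        const { name, baseURL, apiKey } = endpoint;
        endpointsByName[name] = endpoint;
        const key = `${baseURL}__${apiKey}`; // assumed key format
        fetchPromises[key] = fetchPromises[key] ?? fetchModels({ baseURL, apiKey, name });
        (namesByKey[key] = namesByKey[key] ?? []).push(name);
      }
      const modelsConfig = {};
      for (const [key, promise] of Object.entries(fetchPromises)) {
        const fetched = await promise;
        for (const name of namesByKey[key]) {
          const { models } = endpointsByName[name];
          modelsConfig[name] = !fetched?.length ? models?.default ?? [] : fetched;
        }
      }
      return modelsConfig;
    }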
@@ -262,4 +262,68 @@ describe('loadConfigModels', () => {
      }),
    );
  });

  it('falls back to default models if fetching returns an empty array', async () => {
    getCustomConfig.mockResolvedValue({
      endpoints: {
        custom: [
          {
            name: 'EndpointWithSameFetchKey',
            apiKey: 'API_KEY',
            baseURL: 'http://example.com',
            models: {
              fetch: true,
              default: ['defaultModel1'],
            },
          },
          {
            name: 'EmptyFetchModel',
            apiKey: 'API_KEY',
            baseURL: 'http://example.com',
            models: {
              fetch: true,
              default: ['defaultModel1', 'defaultModel2'],
            },
          },
        ],
      },
    });

    fetchModels.mockResolvedValue([]);

    const result = await loadConfigModels(mockRequest);
    expect(fetchModels).toHaveBeenCalledTimes(1);
    expect(result.EmptyFetchModel).toEqual(['defaultModel1', 'defaultModel2']);
  });

  it('falls back to default models if fetching returns a falsy value', async () => {
    getCustomConfig.mockResolvedValue({
      endpoints: {
        custom: [
          {
            name: 'FalsyFetchModel',
            apiKey: 'API_KEY',
            baseURL: 'http://example.com',
            models: {
              fetch: true,
              default: ['defaultModel1', 'defaultModel2'],
            },
          },
        ],
      },
    });

    fetchModels.mockResolvedValue(false);

    const result = await loadConfigModels(mockRequest);

    expect(fetchModels).toHaveBeenCalledWith(
      expect.objectContaining({
        name: 'FalsyFetchModel',
        apiKey: 'API_KEY',
      }),
    );

    expect(result.FalsyFetchModel).toEqual(['defaultModel1', 'defaultModel2']);
  });
});
@@ -1,5 +1,5 @@
const path = require('path');
const { CacheKeys, configSchema } = require('librechat-data-provider');
const { CacheKeys, configSchema, EImageOutputType } = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores');
const loadYaml = require('~/utils/loadYaml');
const { logger } = require('~/config');
@@ -55,6 +55,20 @@ async function loadCustomConfig() {
  }

  const result = configSchema.strict().safeParse(customConfig);
  if (result?.error?.errors?.some((err) => err?.path && err.path?.includes('imageOutputType'))) {
    throw new Error(
      `
Please specify a correct \`imageOutputType\` value (case-sensitive).

The available options are:
- ${EImageOutputType.JPEG}
- ${EImageOutputType.PNG}
- ${EImageOutputType.WEBP}

Refer to the latest config file guide for more information:
https://docs.librechat.ai/install/configuration/custom_config.html`,
    );
  }
  if (!result.success) {
    i === 0 && logger.error(`Invalid custom config file at ${configPath}`, result.error);
    i === 0 && i++;
@@ -1,5 +1,6 @@
const { AnthropicClient } = require('~/app');
const { EModelEndpoint } = require('librechat-data-provider');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
const { AnthropicClient } = require('~/app');

const initializeClient = async ({ req, res, endpointOption }) => {
  const { ANTHROPIC_API_KEY, ANTHROPIC_REVERSE_PROXY, PROXY } = process.env;
@@ -7,14 +8,15 @@ const initializeClient = async ({ req, res, endpointOption }) => {
  const isUserProvided = ANTHROPIC_API_KEY === 'user_provided';

  const anthropicApiKey = isUserProvided
    ? await getAnthropicUserKey(req.user.id)
    ? await getUserKey({ userId: req.user.id, name: EModelEndpoint.anthropic })
    : ANTHROPIC_API_KEY;

  if (!anthropicApiKey) {
    throw new Error('Anthropic API key not provided. Please provide it again.');
  }

  if (expiresAt && isUserProvided) {
    checkUserKeyExpiry(
      expiresAt,
      'Your ANTHROPIC_API_KEY has expired. Please provide your API key again.',
    );
    checkUserKeyExpiry(expiresAt, EModelEndpoint.anthropic);
  }

  const client = new AnthropicClient(anthropicApiKey, {
@@ -31,8 +33,4 @@ const initializeClient = async ({ req, res, endpointOption }) => {
  };
};

const getAnthropicUserKey = async (userId) => {
  return await getUserKey({ userId, name: 'anthropic' });
};

module.exports = initializeClient;
@@ -1,12 +1,13 @@
const OpenAI = require('openai');
const { HttpsProxyAgent } = require('https-proxy-agent');
const {
  ErrorTypes,
  EModelEndpoint,
  resolveHeaders,
  mapModelToAzureConfig,
} = require('librechat-data-provider');
const {
  getUserKey,
  getUserKeyValues,
  getUserKeyExpiry,
  checkUserKeyExpiry,
} = require('~/server/services/UserService');
@@ -26,18 +27,8 @@ const initializeClient = async ({ req, res, endpointOption, initAppClient = fals
      userId: req.user.id,
      name: EModelEndpoint.assistants,
    });
    checkUserKeyExpiry(
      expiresAt,
      'Your Assistants API key has expired. Please provide your API key again.',
    );
    userValues = await getUserKey({ userId: req.user.id, name: EModelEndpoint.assistants });
    try {
      userValues = JSON.parse(userValues);
    } catch (e) {
      throw new Error(
        'Invalid JSON provided for Assistants API user values. Please provide them again.',
      );
    }
    checkUserKeyExpiry(expiresAt, EModelEndpoint.assistants);
    userValues = await getUserKeyValues({ userId: req.user.id, name: EModelEndpoint.assistants });
  }

  let apiKey = userProvidesKey ? userValues.apiKey : ASSISTANTS_API_KEY;
@@ -101,6 +92,14 @@ const initializeClient = async ({ req, res, endpointOption, initAppClient = fals
    }
  }

  if (userProvidesKey && !apiKey) {
    throw new Error(
      JSON.stringify({
        type: ErrorTypes.NO_USER_KEY,
      }),
    );
  }

  if (!apiKey) {
    throw new Error('Assistants API key not provided. Please provide it again.');
  }
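Throughout these initializers, the inline `JSON.parse` of stored user values gives way to a shared `getUserKeyValues` helper that parses once and throws a typed error. The spec mocks later in this diff suggest the helper behaves roughly like the sketch below (a reconstruction under that assumption, not the committed implementation):

    const { ErrorTypes } = require('librechat-data-provider');
    const { getUserKey } = require('~/server/services/UserService');

    // Reconstructed from the spec mocks in this diff; assumed, not confirmed.
    async function getUserKeyValues({ userId, name }) {
      let userValues = await getUserKey({ userId, name });
      try {
        userValues = JSON.parse(userValues);
      } catch (e) {
        // A machine-readable error type replaces per-endpoint message strings.
        throw new Error(JSON.stringify({ type: ErrorTypes.INVALID_USER_KEY }));
      }
      return userValues;
    }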
@@ -1,12 +1,14 @@
// const OpenAI = require('openai');
const { HttpsProxyAgent } = require('https-proxy-agent');
const { getUserKey, getUserKeyExpiry } = require('~/server/services/UserService');
const { ErrorTypes } = require('librechat-data-provider');
const { getUserKey, getUserKeyExpiry, getUserKeyValues } = require('~/server/services/UserService');
const initializeClient = require('./initializeClient');
// const { OpenAIClient } = require('~/app');

jest.mock('~/server/services/UserService', () => ({
  getUserKey: jest.fn(),
  getUserKeyExpiry: jest.fn(),
  getUserKeyValues: jest.fn(),
  checkUserKeyExpiry: jest.requireActual('~/server/services/UserService').checkUserKeyExpiry,
}));

@@ -52,9 +54,7 @@ describe('initializeClient', () => {
    process.env.ASSISTANTS_API_KEY = 'user_provided';
    process.env.ASSISTANTS_BASE_URL = 'user_provided';

    getUserKey.mockResolvedValue(
      JSON.stringify({ apiKey: 'user-api-key', baseURL: 'https://user.api.url' }),
    );
    getUserKeyValues.mockResolvedValue({ apiKey: 'user-api-key', baseURL: 'https://user.api.url' });
    getUserKeyExpiry.mockResolvedValue(isoString);

    const req = { user: { id: 'user123' }, app };
@@ -70,11 +70,24 @@ describe('initializeClient', () => {
    process.env.ASSISTANTS_API_KEY = 'user_provided';
    getUserKey.mockResolvedValue('invalid-json');
    getUserKeyExpiry.mockResolvedValue(isoString);
    getUserKeyValues.mockImplementation(() => {
      let userValues = getUserKey();
      try {
        userValues = JSON.parse(userValues);
      } catch (e) {
        throw new Error(
          JSON.stringify({
            type: ErrorTypes.INVALID_USER_KEY,
          }),
        );
      }
      return userValues;
    });

    const req = { user: { id: 'user123' } };
    const res = {};

    await expect(initializeClient({ req, res })).rejects.toThrow(/Invalid JSON/);
    await expect(initializeClient({ req, res })).rejects.toThrow(/invalid_user_key/);
  });

  test('throws error if API key is not provided', async () => {
@@ -1,11 +1,12 @@
const {
  CacheKeys,
  ErrorTypes,
  envVarRegex,
  EModelEndpoint,
  FetchTokenConfig,
  extractEnvVariable,
} = require('librechat-data-provider');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
const { getUserKeyValues, checkUserKeyExpiry } = require('~/server/services/UserService');
const getCustomConfig = require('~/server/services/Config/getCustomConfig');
const { fetchModels } = require('~/server/services/ModelService');
const getLogStores = require('~/cache/getLogStores');
@@ -48,21 +49,29 @@ const initializeClient = async ({ req, res, endpointOption }) => {

  let userValues = null;
  if (expiresAt && (userProvidesKey || userProvidesURL)) {
    checkUserKeyExpiry(
      expiresAt,
      `Your API values for ${endpoint} have expired. Please configure them again.`,
    );
    userValues = await getUserKey({ userId: req.user.id, name: endpoint });
    try {
      userValues = JSON.parse(userValues);
    } catch (e) {
      throw new Error(`Invalid JSON provided for ${endpoint} user values.`);
    }
    checkUserKeyExpiry(expiresAt, endpoint);
    userValues = await getUserKeyValues({ userId: req.user.id, name: endpoint });
  }

  let apiKey = userProvidesKey ? userValues?.apiKey : CUSTOM_API_KEY;
  let baseURL = userProvidesURL ? userValues?.baseURL : CUSTOM_BASE_URL;

  if (userProvidesKey && !apiKey) {
    throw new Error(
      JSON.stringify({
        type: ErrorTypes.NO_USER_KEY,
      }),
    );
  }

  if (userProvidesURL && !baseURL) {
    throw new Error(
      JSON.stringify({
        type: ErrorTypes.NO_BASE_URL,
      }),
    );
  }

  if (!apiKey) {
    throw new Error(`${endpoint} API key not provided.`);
  }
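The initializer now fails with machine-readable errors: a JSON string carrying a `type` field with snake_case values (the specs in this diff match `/invalid_user_key/` and `/expired_user_key/`). An illustrative consumer-side mapping (the handler shape and the exact `no_user_key`/`no_base_url` strings are assumptions):

    // Hypothetical client-side mapping of the structured errors thrown above.
    function toUserMessage(err) {
      try {
        const { type } = JSON.parse(err.message);
        switch (type) {
          case 'no_user_key': // assumed value of ErrorTypes.NO_USER_KEY
            return 'Please provide your API key again.';
          case 'no_base_url': // assumed value of ErrorTypes.NO_BASE_URL
            return 'Please provide your base URL again.';
          case 'invalid_user_key': // matched by the specs in this diff
            return 'Your saved key could not be read. Please provide it again.';
          default:
            return 'An unexpected error occurred.';
        }
      } catch {
        return err.message; // plain-text errors pass through unchanged
      }
    }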
@@ -1,6 +1,6 @@
const { GoogleClient } = require('~/app');
const { EModelEndpoint, AuthKeys } = require('librechat-data-provider');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
const { GoogleClient } = require('~/app');

const initializeClient = async ({ req, res, endpointOption }) => {
  const { GOOGLE_KEY, GOOGLE_REVERSE_PROXY, PROXY } = process.env;
@@ -9,10 +9,7 @@ const initializeClient = async ({ req, res, endpointOption }) => {

  let userKey = null;
  if (expiresAt && isUserProvided) {
    checkUserKeyExpiry(
      expiresAt,
      'Your Google Credentials have expired. Please provide your Service Account JSON Key or Generative Language API Key again.',
    );
    checkUserKeyExpiry(expiresAt, EModelEndpoint.google);
    userKey = await getUserKey({ userId: req.user.id, name: EModelEndpoint.google });
  }

@@ -1,15 +1,10 @@
// file deepcode ignore HardcodedNonCryptoSecret: No hardcoded secrets

const { getUserKey } = require('~/server/services/UserService');
const initializeClient = require('./initializeClient');
const { GoogleClient } = require('~/app');
const { checkUserKeyExpiry, getUserKey } = require('../../UserService');

jest.mock('../../UserService', () => ({
  checkUserKeyExpiry: jest.fn().mockImplementation((expiresAt, errorMessage) => {
    if (new Date(expiresAt) < new Date()) {
      throw new Error(errorMessage);
    }
  }),
jest.mock('~/server/services/UserService', () => ({
  checkUserKeyExpiry: jest.requireActual('~/server/services/UserService').checkUserKeyExpiry,
  getUserKey: jest.fn().mockImplementation(() => ({})),
}));

@@ -74,13 +69,8 @@ describe('google/initializeClient', () => {
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    checkUserKeyExpiry.mockImplementation((expiresAt, errorMessage) => {
      throw new Error(errorMessage);
    });

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      /Your Google Credentials have expired/,
      /expired_user_key/,
    );
  });
});
@@ -3,7 +3,7 @@ const {
  mapModelToAzureConfig,
  resolveHeaders,
} = require('librechat-data-provider');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
const { getUserKeyValues, checkUserKeyExpiry } = require('~/server/services/UserService');
const { isEnabled, isUserProvided } = require('~/server/utils');
const { getAzureCredentials } = require('~/utils');
const { PluginsClient } = require('~/app');
@@ -49,18 +49,8 @@ const initializeClient = async ({ req, res, endpointOption }) => {

  let userValues = null;
  if (expiresAt && (userProvidesKey || userProvidesURL)) {
    checkUserKeyExpiry(
      expiresAt,
      'Your OpenAI API values have expired. Please provide them again.',
    );
    userValues = await getUserKey({ userId: req.user.id, name: endpoint });
    try {
      userValues = JSON.parse(userValues);
    } catch (e) {
      throw new Error(
        `Invalid JSON provided for ${endpoint} user values. Please provide them again.`,
      );
    }
    checkUserKeyExpiry(expiresAt, endpoint);
    userValues = await getUserKeyValues({ userId: req.user.id, name: endpoint });
  }

  let apiKey = userProvidesKey ? userValues?.apiKey : credentials[endpoint];

@@ -1,12 +1,13 @@
// gptPlugins/initializeClient.spec.js
const { EModelEndpoint, validateAzureGroups } = require('librechat-data-provider');
const { getUserKey } = require('~/server/services/UserService');
const { EModelEndpoint, ErrorTypes, validateAzureGroups } = require('librechat-data-provider');
const { getUserKey, getUserKeyValues } = require('~/server/services/UserService');
const initializeClient = require('./initializeClient');
const { PluginsClient } = require('~/app');

// Mock getUserKey since it's the only function we want to mock
jest.mock('~/server/services/UserService', () => ({
  getUserKey: jest.fn(),
  getUserKeyValues: jest.fn(),
  checkUserKeyExpiry: jest.requireActual('~/server/services/UserService').checkUserKeyExpiry,
}));

@@ -205,7 +206,7 @@ describe('gptPlugins/initializeClient', () => {
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    getUserKey.mockResolvedValue(JSON.stringify({ apiKey: 'test-user-provided-openai-api-key' }));
    getUserKeyValues.mockResolvedValue({ apiKey: 'test-user-provided-openai-api-key' });

    const { openAIApiKey } = await initializeClient({ req, res, endpointOption });

@@ -225,14 +226,12 @@ describe('gptPlugins/initializeClient', () => {
    const res = {};
    const endpointOption = { modelOptions: { model: 'test-model' } };

    getUserKey.mockResolvedValue(
      JSON.stringify({
        apiKey: JSON.stringify({
          azureOpenAIApiKey: 'test-user-provided-azure-api-key',
          azureOpenAIApiDeploymentName: 'test-deployment',
        }),
    getUserKeyValues.mockResolvedValue({
      apiKey: JSON.stringify({
        azureOpenAIApiKey: 'test-user-provided-azure-api-key',
        azureOpenAIApiDeploymentName: 'test-deployment',
      }),
    );
    });

    const { azure } = await initializeClient({ req, res, endpointOption });

@@ -251,7 +250,9 @@ describe('gptPlugins/initializeClient', () => {
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(/Your OpenAI API/);
    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      /expired_user_key/,
    );
  });

  test('should throw an error if the user-provided Azure key is invalid JSON', async () => {
@@ -268,9 +269,22 @@ describe('gptPlugins/initializeClient', () => {

    // Simulate an invalid JSON string returned from getUserKey
    getUserKey.mockResolvedValue('invalid-json');
    getUserKeyValues.mockImplementation(() => {
      let userValues = getUserKey();
      try {
        userValues = JSON.parse(userValues);
      } catch (e) {
        throw new Error(
          JSON.stringify({
            type: ErrorTypes.INVALID_USER_KEY,
          }),
        );
      }
      return userValues;
    });

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      /Invalid JSON provided/,
      /invalid_user_key/,
    );
  });

@@ -305,9 +319,22 @@ describe('gptPlugins/initializeClient', () => {

    // Mock getUserKey to return a non-JSON string
    getUserKey.mockResolvedValue('not-a-json');
    getUserKeyValues.mockImplementation(() => {
      let userValues = getUserKey();
      try {
        userValues = JSON.parse(userValues);
      } catch (e) {
        throw new Error(
          JSON.stringify({
            type: ErrorTypes.INVALID_USER_KEY,
          }),
        );
      }
      return userValues;
    });

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      /Invalid JSON provided for openAI user values/,
      /invalid_user_key/,
    );
  });

@@ -369,9 +396,10 @@ describe('gptPlugins/initializeClient', () => {
    const res = {};
    const endpointOption = {};

    getUserKey.mockResolvedValue(
      JSON.stringify({ apiKey: 'test', baseURL: 'https://user-provided-url.com' }),
    );
    getUserKeyValues.mockResolvedValue({
      apiKey: 'test',
      baseURL: 'https://user-provided-url.com',
    });

    const result = await initializeClient({ req, res, endpointOption });

@@ -1,9 +1,10 @@
const {
  ErrorTypes,
  EModelEndpoint,
  resolveHeaders,
  mapModelToAzureConfig,
} = require('librechat-data-provider');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
const { getUserKeyValues, checkUserKeyExpiry } = require('~/server/services/UserService');
const { isEnabled, isUserProvided } = require('~/server/utils');
const { getAzureCredentials } = require('~/utils');
const { OpenAIClient } = require('~/app');
@@ -36,18 +37,8 @@ const initializeClient = async ({ req, res, endpointOption }) => {

  let userValues = null;
  if (expiresAt && (userProvidesKey || userProvidesURL)) {
    checkUserKeyExpiry(
      expiresAt,
      'Your OpenAI API values have expired. Please provide them again.',
    );
    userValues = await getUserKey({ userId: req.user.id, name: endpoint });
    try {
      userValues = JSON.parse(userValues);
    } catch (e) {
      throw new Error(
        `Invalid JSON provided for ${endpoint} user values. Please provide them again.`,
      );
    }
    checkUserKeyExpiry(expiresAt, endpoint);
    userValues = await getUserKeyValues({ userId: req.user.id, name: endpoint });
  }

  let apiKey = userProvidesKey ? userValues?.apiKey : credentials[endpoint];
@@ -99,8 +90,16 @@ const initializeClient = async ({ req, res, endpointOption }) => {
    apiKey = clientOptions.azure.azureOpenAIApiKey;
  }

  if (userProvidesKey && !apiKey) {
    throw new Error(
      JSON.stringify({
        type: ErrorTypes.NO_USER_KEY,
      }),
    );
  }

  if (!apiKey) {
    throw new Error(`${endpoint} API key not provided. Please provide it again.`);
    throw new Error(`${endpoint} API Key not provided.`);
  }

  const client = new OpenAIClient(apiKey, clientOptions);

@@ -1,11 +1,12 @@
const { EModelEndpoint, validateAzureGroups } = require('librechat-data-provider');
const { getUserKey } = require('~/server/services/UserService');
const { EModelEndpoint, ErrorTypes, validateAzureGroups } = require('librechat-data-provider');
const { getUserKey, getUserKeyValues } = require('~/server/services/UserService');
const initializeClient = require('./initializeClient');
const { OpenAIClient } = require('~/app');

// Mock getUserKey since it's the only function we want to mock
jest.mock('~/server/services/UserService', () => ({
  getUserKey: jest.fn(),
  getUserKeyValues: jest.fn(),
  checkUserKeyExpiry: jest.requireActual('~/server/services/UserService').checkUserKeyExpiry,
}));

@@ -200,7 +201,9 @@ describe('initializeClient', () => {
    const res = {};
    const endpointOption = {};

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(/Your OpenAI API/);
    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      /expired_user_key/,
    );
  });

  test('should throw an error if no API keys are provided in the environment', async () => {
@@ -217,7 +220,7 @@ describe('initializeClient', () => {
    const endpointOption = {};

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      `${EModelEndpoint.openAI} API key not provided.`,
      `${EModelEndpoint.openAI} API Key not provided.`,
    );
  });

@@ -241,7 +244,7 @@ describe('initializeClient', () => {
    process.env.OPENAI_API_KEY = 'user_provided';

    // Mock getUserKey to return the expected key
    getUserKey.mockResolvedValue(JSON.stringify({ apiKey: 'test-user-provided-openai-api-key' }));
    getUserKeyValues.mockResolvedValue({ apiKey: 'test-user-provided-openai-api-key' });

    // Call the initializeClient function
    const result = await initializeClient({ req, res, endpointOption });
@@ -266,7 +269,9 @@ describe('initializeClient', () => {
    // Mock getUserKey to return an invalid key
    getUserKey.mockResolvedValue(invalidKey);

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(/Your OpenAI API/);
    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      /expired_user_key/,
    );
  });

  test('should throw an error when user-provided values are not valid JSON', async () => {
@@ -281,9 +286,22 @@ describe('initializeClient', () => {

    // Mock getUserKey to return a non-JSON string
    getUserKey.mockResolvedValue('not-a-json');
    getUserKeyValues.mockImplementation(() => {
      let userValues = getUserKey();
      try {
        userValues = JSON.parse(userValues);
      } catch (e) {
        throw new Error(
          JSON.stringify({
            type: ErrorTypes.INVALID_USER_KEY,
          }),
        );
      }
      return userValues;
    });

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      /Invalid JSON provided for openAI user values/,
      /invalid_user_key/,
    );
  });

@@ -347,9 +365,10 @@ describe('initializeClient', () => {
    const res = {};
    const endpointOption = {};

    getUserKey.mockResolvedValue(
      JSON.stringify({ apiKey: 'test', baseURL: 'https://user-provided-url.com' }),
    );
    getUserKeyValues.mockResolvedValue({
      apiKey: 'test',
      baseURL: 'https://user-provided-url.com',
    });

    const result = await initializeClient({ req, res, endpointOption });

@@ -2,9 +2,10 @@ const fs = require('fs');
const path = require('path');
const axios = require('axios');
const fetch = require('node-fetch');
const { ref, uploadBytes, getDownloadURL, deleteObject } = require('firebase/storage');
const { ref, uploadBytes, getDownloadURL, getStream, deleteObject } = require('firebase/storage');
const { getBufferMetadata } = require('~/server/utils');
const { getFirebaseStorage } = require('./initialize');
const { logger } = require('~/config');

/**
 * Deletes a file from Firebase Storage.
@@ -15,7 +16,7 @@ const { getFirebaseStorage } = require('./initialize');
async function deleteFile(basePath, fileName) {
  const storage = getFirebaseStorage();
  if (!storage) {
    console.error('Firebase is not initialized. Cannot delete file from Firebase Storage.');
    logger.error('Firebase is not initialized. Cannot delete file from Firebase Storage.');
    throw new Error('Firebase is not initialized');
  }

@@ -23,9 +24,9 @@ async function deleteFile(basePath, fileName) {

  try {
    await deleteObject(storageRef);
    console.log('File deleted successfully from Firebase Storage');
    logger.debug('File deleted successfully from Firebase Storage');
  } catch (error) {
    console.error('Error deleting file from Firebase Storage:', error.message);
    logger.error('Error deleting file from Firebase Storage:', error.message);
    throw error;
  }
}
@@ -51,7 +52,7 @@ async function deleteFile(basePath, fileName) {
async function saveURLToFirebase({ userId, URL, fileName, basePath = 'images' }) {
  const storage = getFirebaseStorage();
  if (!storage) {
    console.error('Firebase is not initialized. Cannot save file to Firebase Storage.');
    logger.error('Firebase is not initialized. Cannot save file to Firebase Storage.');
    return null;
  }

@@ -63,7 +64,7 @@ async function saveURLToFirebase({ userId, URL, fileName, basePath = 'images' })
    await uploadBytes(storageRef, buffer);
    return await getBufferMetadata(buffer);
  } catch (error) {
    console.error('Error uploading file to Firebase Storage:', error.message);
    logger.error('Error uploading file to Firebase Storage:', error.message);
    return null;
  }
}
@@ -87,7 +88,7 @@ async function saveURLToFirebase({ userId, URL, fileName, basePath = 'images' })
async function getFirebaseURL({ fileName, basePath = 'images' }) {
  const storage = getFirebaseStorage();
  if (!storage) {
    console.error('Firebase is not initialized. Cannot get image URL from Firebase Storage.');
    logger.error('Firebase is not initialized. Cannot get image URL from Firebase Storage.');
    return null;
  }

@@ -96,7 +97,7 @@ async function getFirebaseURL({ fileName, basePath = 'images' }) {
  try {
    return await getDownloadURL(storageRef);
  } catch (error) {
    console.error('Error fetching file URL from Firebase Storage:', error.message);
    logger.error('Error fetching file URL from Firebase Storage:', error.message);
    return null;
  }
}
@@ -212,6 +213,26 @@ async function uploadFileToFirebase({ req, file, file_id }) {
  return { filepath: downloadURL, bytes };
}

/**
 * Retrieves a readable stream for a file from Firebase storage.
 *
 * @param {string} filepath - The filepath.
 * @returns {ReadableStream} A readable stream of the file.
 */
function getFirebaseFileStream(filepath) {
  try {
    const storage = getFirebaseStorage();
    if (!storage) {
      throw new Error('Firebase is not initialized');
    }
    const fileRef = ref(storage, filepath);
    return getStream(fileRef);
  } catch (error) {
    logger.error('Error getting Firebase file stream:', error);
    throw error;
  }
}

module.exports = {
  deleteFile,
  getFirebaseURL,
@@ -219,4 +240,5 @@ module.exports = {
  deleteFirebaseFile,
  uploadFileToFirebase,
  saveBufferToFirebase,
  getFirebaseFileStream,
};
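With `getFirebaseFileStream` exported, a download route can pipe the file straight to the HTTP response instead of buffering it. A hedged usage sketch (the route path, import path, and header handling are illustrative, not part of this diff):

    const express = require('express');
    const { getFirebaseFileStream } = require('~/server/services/Files/Firebase');

    const router = express.Router();

    // Illustrative download route; not part of this diff.
    router.get('/download', (req, res) => {
      try {
        const stream = getFirebaseFileStream(req.query.filepath);
        res.setHeader('Content-Disposition', 'attachment');
        stream.pipe(res); // stream to the client without buffering in memory
      } catch (error) {
        res.status(500).send('Unable to stream file');
      }
    });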
@@ -8,7 +8,7 @@ const { updateFile } = require('~/models/File');
const { logger } = require('~/config');

/**
 * Converts an image file to the WebP format. The function first resizes the image based on the specified
 * Converts an image file to the target format. The function first resizes the image based on the specified
 * resolution.
 *
 * @param {Object} params - The params object.
@@ -21,7 +21,7 @@ const { logger } = require('~/config');
 *
 * @returns {Promise<{ filepath: string, bytes: number, width: number, height: number}>}
 * A promise that resolves to an object containing:
 * - filepath: The path where the converted WebP image is saved.
 * - filepath: The path where the converted image is saved.
 * - bytes: The size of the converted image in bytes.
 * - width: The width of the converted image.
 * - height: The height of the converted image.
@@ -39,15 +39,16 @@ async function uploadImageToFirebase({ req, file, file_id, endpoint, resolution

  let webPBuffer;
  let fileName = `${file_id}__${path.basename(inputFilePath)}`;
  if (extension.toLowerCase() === '.webp') {
  const targetExtension = `.${req.app.locals.imageOutputType}`;
  if (extension.toLowerCase() === targetExtension) {
    webPBuffer = resizedBuffer;
  } else {
    webPBuffer = await sharp(resizedBuffer).toFormat('webp').toBuffer();
    webPBuffer = await sharp(resizedBuffer).toFormat(req.app.locals.imageOutputType).toBuffer();
    // Replace or append the correct extension
    const extRegExp = new RegExp(path.extname(fileName) + '$');
    fileName = fileName.replace(extRegExp, '.webp');
    fileName = fileName.replace(extRegExp, targetExtension);
    if (!path.extname(fileName)) {
      fileName += '.webp';
      fileName += targetExtension;
    }
  }

@@ -79,7 +80,7 @@ async function prepareImageURL(req, file) {
 * If the 'manual' flag is set to 'true', it also updates the user's avatar URL in the database.
 *
 * @param {object} params - The parameters object.
 * @param {Buffer} params.buffer - The Buffer containing the avatar image in WebP format.
 * @param {Buffer} params.buffer - The Buffer containing the avatar image.
 * @param {string} params.userId - The user ID.
 * @param {string} params.manual - A string flag indicating whether the update is manual ('true' or 'false').
 * @returns {Promise<string>} - A promise that resolves with the URL of the uploaded avatar.
@@ -255,6 +255,21 @@ async function uploadLocalFile({ req, file, file_id }) {
  return { filepath, bytes };
}

/**
 * Retrieves a readable stream for a file from local storage.
 *
 * @param {string} filepath - The filepath.
 * @returns {ReadableStream} A readable stream of the file.
 */
function getLocalFileStream(filepath) {
  try {
    return fs.createReadStream(filepath);
  } catch (error) {
    logger.error('Error getting local file stream:', error);
    throw error;
  }
}

module.exports = {
  saveLocalFile,
  saveLocalImage,
@@ -263,4 +278,5 @@ module.exports = {
  getLocalFileURL,
  deleteLocalFile,
  uploadLocalFile,
  getLocalFileStream,
};
@@ -6,11 +6,11 @@ const { updateUser } = require('~/models/userMethods');
const { updateFile } = require('~/models/File');

/**
 * Converts an image file to the WebP format. The function first resizes the image based on the specified
 * Converts an image file to the target format. The function first resizes the image based on the specified
 * resolution.
 *
 * If the original image is already in WebP format, it writes the resized image back. Otherwise,
 * it converts the image to WebP format before saving.
 * If the original image is already in target format, it writes the resized image back. Otherwise,
 * it converts the image to target format before saving.
 *
 * The original image is deleted after conversion.
 * @param {Object} params - The params object.
@@ -24,7 +24,7 @@ const { updateFile } = require('~/models/File');
 *
 * @returns {Promise<{ filepath: string, bytes: number, width: number, height: number}>}
 * A promise that resolves to an object containing:
 * - filepath: The path where the converted WebP image is saved.
 * - filepath: The path where the converted image is saved.
 * - bytes: The size of the converted image in bytes.
 * - width: The width of the converted image.
 * - height: The height of the converted image.
@@ -48,16 +48,17 @@ async function uploadLocalImage({ req, file, file_id, endpoint, resolution = 'hi

  const fileName = `${file_id}__${path.basename(inputFilePath)}`;
  const newPath = path.join(userPath, fileName);
  const targetExtension = `.${req.app.locals.imageOutputType}`;

  if (extension.toLowerCase() === '.webp') {
  if (extension.toLowerCase() === targetExtension) {
    const bytes = Buffer.byteLength(resizedBuffer);
    await fs.promises.writeFile(newPath, resizedBuffer);
    const filepath = path.posix.join('/', 'images', req.user.id, path.basename(newPath));
    return { filepath, bytes, width, height };
  }

  const outputFilePath = newPath.replace(extension, '.webp');
  const data = await sharp(resizedBuffer).toFormat('webp').toBuffer();
  const outputFilePath = newPath.replace(extension, targetExtension);
  const data = await sharp(resizedBuffer).toFormat(req.app.locals.imageOutputType).toBuffer();
  await fs.promises.writeFile(outputFilePath, data);
  const bytes = Buffer.byteLength(data);
  const filepath = path.posix.join('/', 'images', req.user.id, path.basename(outputFilePath));
@@ -109,7 +110,7 @@ async function prepareImagesLocal(req, file) {
 * If the 'manual' flag is set to 'true', it also updates the user's avatar URL in the database.
 *
 * @param {object} params - The parameters object.
 * @param {Buffer} params.buffer - The Buffer containing the avatar image in WebP format.
 * @param {Buffer} params.buffer - The Buffer containing the avatar image.
 * @param {string} params.userId - The user ID.
 * @param {string} params.manual - A string flag indicating whether the update is manual ('true' or 'false').
 * @returns {Promise<string>} - A promise that resolves with the URL of the uploaded avatar.
@@ -60,4 +60,20 @@ async function deleteOpenAIFile(req, file, openai) {
  }
}

module.exports = { uploadOpenAIFile, deleteOpenAIFile };
/**
 * Retrieves a readable stream for a file from the OpenAI API.
 *
 * @param {string} file_id - The file_id.
 * @param {OpenAI} openai - The initialized OpenAI client.
 * @returns {Promise<ReadableStream>} A readable stream of the file.
 */
async function getOpenAIFileStream(file_id, openai) {
  try {
    return await openai.files.content(file_id);
  } catch (error) {
    logger.error('Error getting OpenAI file download stream:', error);
    throw error;
  }
}

module.exports = { uploadOpenAIFile, deleteOpenAIFile, getOpenAIFileStream };
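Each storage backend now exposes a stream getter with the same intent but a different lookup key: a filepath for Firebase and local storage, a file_id plus an initialized client for OpenAI. A sketch of how a caller might dispatch on file source (the dispatcher itself is an assumption, not shown in this diff):

    const { FileSources } = require('librechat-data-provider');
    const { getFirebaseFileStream } = require('./Firebase');
    const { getLocalFileStream } = require('./Local');
    const { getOpenAIFileStream } = require('./OpenAI');

    // Hypothetical dispatcher over the three stream getters added in this diff.
    function getDownloadStream({ source, filepath, file_id, openai }) {
      if (source === FileSources.firebase) {
        return getFirebaseFileStream(filepath);
      }
      if (source === FileSources.openai) {
        return getOpenAIFileStream(file_id, openai);
      }
      return getLocalFileStream(filepath); // FileSources.local
    }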
@@ -18,9 +18,12 @@ const { logger } = require('~/config');
 * file path is invalid or if there is an error in deletion.
 */
const deleteVectors = async (req, file) => {
  if (file.embedded && process.env.RAG_API_URL) {
  if (!file.embedded || !process.env.RAG_API_URL) {
    return;
  }
  try {
    const jwtToken = req.headers.authorization.split(' ')[1];
    axios.delete(`${process.env.RAG_API_URL}/documents`, {
    return await axios.delete(`${process.env.RAG_API_URL}/documents`, {
      headers: {
        Authorization: `Bearer ${jwtToken}`,
        'Content-Type': 'application/json',
@@ -28,6 +31,9 @@ const deleteVectors = async (req, file) => {
      },
      data: [file.file_id],
    });
  } catch (error) {
    logger.error('Error deleting vectors', error);
    throw new Error(error.message || 'An error occurred during file deletion.');
  }
};
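Two details above are easy to miss: the guard clause inverts the old condition so the function exits early when there is nothing to delete, and the request is now returned so callers actually await it. Since `axios.delete` takes no body argument, the file IDs ride in the config object's `data` field, which axios supports for DELETE requests. A minimal stand-alone sketch (URL and token are placeholders):

    const axios = require('axios');

    // Minimal sketch: DELETE with a JSON body via the axios config `data` field.
    async function deleteDocuments(ragApiUrl, jwtToken, fileIds) {
      return await axios.delete(`${ragApiUrl}/documents`, {
        headers: {
          Authorization: `Bearer ${jwtToken}`,
          'Content-Type': 'application/json',
        },
        data: fileIds, // e.g. ['file-abc123']
      });
    }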
@@ -1,15 +1,17 @@
const sharp = require('sharp');
const fs = require('fs').promises;
const fetch = require('node-fetch');
const { EImageOutputType } = require('librechat-data-provider');
const { resizeAndConvert } = require('./resize');
const { logger } = require('~/config');

/**
 * Uploads an avatar image for a user. This function can handle various types of input (URL, Buffer, or File object),
 * processes the image to a square format, converts it to WebP format, and returns the resized buffer.
 * processes the image to a square format, converts it to target format, and returns the resized buffer.
 *
 * @param {Object} params - The parameters object.
 * @param {string} params.userId - The unique identifier of the user for whom the avatar is being uploaded.
 * @param {string} params.desiredFormat - The desired output format of the image.
 * @param {(string|Buffer|File)} params.input - The input representing the avatar image. Can be a URL (string),
 * a Buffer, or a File object.
 *
@@ -19,7 +21,7 @@ const { logger } = require('~/config');
 * @throws {Error} Throws an error if the user ID is undefined, the input type is invalid, the image fetching fails,
 * or any other error occurs during the processing.
 */
async function resizeAvatar({ userId, input }) {
async function resizeAvatar({ userId, input, desiredFormat = EImageOutputType.PNG }) {
  try {
    if (userId === undefined) {
      throw new Error('User ID is undefined');
@@ -53,7 +55,10 @@ async function resizeAvatar({ userId, input }) {
      })
      .toBuffer();

    const { buffer } = await resizeAndConvert(squaredBuffer);
    const { buffer } = await resizeAndConvert({
      inputBuffer: squaredBuffer,
      desiredFormat,
    });
    return buffer;
  } catch (error) {
    logger.error('Error uploading the avatar:', error);
@@ -3,9 +3,10 @@ const path = require('path');
const sharp = require('sharp');
const { resizeImageBuffer } = require('./resize');
const { getStrategyFunctions } = require('../strategies');
const { logger } = require('~/config');

/**
 * Converts an image file or buffer to WebP format with specified resolution.
 * Converts an image file or buffer to target output type with specified resolution.
 *
 * @param {Express.Request} req - The request object, containing user and app configuration data.
 * @param {Buffer | Express.Multer.File} file - The file object, containing either a path or a buffer.
@@ -14,7 +15,7 @@ const { getStrategyFunctions } = require('../strategies');
 * @returns {Promise<{filepath: string, bytes: number, width: number, height: number}>} An object containing the path, size, and dimensions of the converted image.
 * @throws Throws an error if there is an issue during the conversion process.
 */
async function convertToWebP(req, file, resolution = 'high', basename = '') {
async function convertImage(req, file, resolution = 'high', basename = '') {
  try {
    let inputBuffer;
    let outputBuffer;
@@ -37,13 +38,13 @@ async function convertToWebP(req, file, resolution = 'high', basename = '') {
      height,
    } = await resizeImageBuffer(inputBuffer, resolution);

    // Check if the file is already in WebP format
    // If it isn't, convert it:
    if (extension === '.webp') {
    // Check if the file is already in target format; if it isn't, convert it:
    const targetExtension = `.${req.app.locals.imageOutputType}`;
    if (extension === targetExtension) {
      outputBuffer = resizedBuffer;
    } else {
      outputBuffer = await sharp(resizedBuffer).toFormat('webp').toBuffer();
      extension = '.webp';
      outputBuffer = await sharp(resizedBuffer).toFormat(req.app.locals.imageOutputType).toBuffer();
      extension = targetExtension;
    }

    // Generate a new filename for the output file
@@ -61,9 +62,9 @@ async function convertToWebP(req, file, resolution = 'high', basename = '') {
    const bytes = Buffer.byteLength(outputBuffer);
    return { filepath: savedFilePath, bytes, width, height };
  } catch (err) {
    console.error(err);
    logger.error(err);
    throw err;
  }
}

module.exports = { convertToWebP };
module.exports = { convertImage };
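Renamed from `convertToWebP`, `convertImage` keeps the same call shape; only the output format now follows `req.app.locals.imageOutputType`. An illustrative call site (import path is assumed):

    const { convertImage } = require('~/server/services/Files/images');

    // Illustrative: `req` must carry app.locals.imageOutputType (e.g. 'webp' or 'png').
    async function processUploadedImage(req, file) {
      const { filepath, bytes, width, height } = await convertImage(req, file, 'high');
      return { filepath, bytes, width, height };
    }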
@@ -1,5 +1,5 @@
const axios = require('axios');
const { EModelEndpoint, FileSources } = require('librechat-data-provider');
const { EModelEndpoint, FileSources, VisionModes } = require('librechat-data-provider');
const { getStrategyFunctions } = require('../strategies');
const { logger } = require('~/config');

@@ -30,11 +30,20 @@ const base64Only = new Set([EModelEndpoint.google, EModelEndpoint.anthropic]);
 * @param {Express.Request} req - The request object.
 * @param {Array<MongoFile>} files - The array of files to encode and format.
 * @param {EModelEndpoint} [endpoint] - Optional: The endpoint for the image.
 * @param {string} [mode] - Optional: The endpoint mode for the image.
 * @returns {Promise<Object>} - A promise that resolves to the result object containing the encoded images and file details.
 */
async function encodeAndFormat(req, files, endpoint) {
async function encodeAndFormat(req, files, endpoint, mode) {
  const promises = [];
  const encodingMethods = {};
  const result = {
    files: [],
    image_urls: [],
  };

  if (!files || !files.length) {
    return result;
  }

  for (let file of files) {
    const source = file.source ?? FileSources.local;
@@ -69,11 +78,6 @@ async function encodeAndFormat(req, files, endpoint) {
  /** @type {Array<[MongoFile, string]>} */
  const formattedImages = await Promise.all(promises);

  const result = {
    files: [],
    image_urls: [],
  };

  for (const [file, imageContent] of formattedImages) {
    const fileMetadata = {
      type: file.type,
@@ -98,12 +102,18 @@ async function encodeAndFormat(req, files, endpoint) {
      image_url: {
        url: imageContent.startsWith('http')
          ? imageContent
          : `data:image/webp;base64,${imageContent}`,
          : `data:${file.type};base64,${imageContent}`,
        detail,
      },
    };

    if (endpoint && endpoint === EModelEndpoint.google) {
    if (endpoint && endpoint === EModelEndpoint.google && mode === VisionModes.generative) {
      delete imagePart.image_url;
      imagePart.inlineData = {
        mimeType: file.type,
        data: imageContent,
      };
    } else if (endpoint && endpoint === EModelEndpoint.google) {
      imagePart.image_url = imagePart.image_url.url;
    } else if (endpoint && endpoint === EModelEndpoint.anthropic) {
      imagePart.type = 'image';
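The branching above emits several payload shapes from one encoded image, depending on endpoint and vision mode. A condensed summary (field names follow the code above; values are placeholders, and Anthropic's full `source` object is not shown in this hunk):

    // OpenAI-style part: a data URL carrying the file's real MIME type.
    const openAIPart = {
      type: 'image_url',
      image_url: { url: 'data:image/png;base64,<data>', detail: 'auto' },
    };

    // Google generative mode: image_url is replaced by inlineData.
    const googleGenerativePart = {
      inlineData: { mimeType: 'image/png', data: '<base64 data>' },
    };

    // Google (non-generative): image_url collapses to the bare URL string.
    const googleLegacyPart = { image_url: 'data:image/png;base64,<data>' };

    // Anthropic: the part type becomes 'image'.
    const anthropicPart = { type: 'image' /* source object omitted in this hunk */ };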
@@ -62,14 +62,20 @@ async function resizeImageBuffer(inputBuffer, resolution, endpoint) {
}

/**
 * Resizes an image buffer to webp format as well as reduces 150 px width.
 * Resizes an image buffer to a specified format and width.
 *
 * @param {Buffer} inputBuffer - The buffer of the image to be resized.
 * @returns {Promise<{ buffer: Buffer, width: number, height: number, bytes: number }>} An object containing the resized image buffer, its size and dimensions.
 * @throws Will throw an error if the resolution parameter is invalid.
 * @param {Object} options - The options for resizing and converting the image.
 * @param {Buffer} options.inputBuffer - The buffer of the image to be resized.
 * @param {string} options.desiredFormat - The desired output format of the image.
 * @param {number} [options.width=150] - The desired width of the image. Defaults to 150 pixels.
 * @returns {Promise<{ buffer: Buffer, width: number, height: number, bytes: number }>} An object containing the resized image buffer, its size, and dimensions.
 * @throws Will throw an error if the resolution or format parameters are invalid.
 */
async function resizeAndConvert(inputBuffer) {
  const resizedBuffer = await sharp(inputBuffer).resize({ width: 150 }).toFormat('webp').toBuffer();
async function resizeAndConvert({ inputBuffer, desiredFormat, width = 150 }) {
  const resizedBuffer = await sharp(inputBuffer)
    .resize({ width })
    .toFormat(desiredFormat)
    .toBuffer();
  const resizedMetadata = await sharp(resizedBuffer).metadata();
  return {
    buffer: resizedBuffer,
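`resizeAndConvert` now takes an options object, making the output format and width explicit at each call site. A minimal usage sketch (the import path and input file path are placeholders):

    const fs = require('fs');
    const { resizeAndConvert } = require('~/server/services/Files/images/resize');

    // Minimal usage of the new options-object signature.
    async function makeThumbnail() {
      const inputBuffer = await fs.promises.readFile('/tmp/avatar-source.png'); // placeholder path
      const { buffer, width, height, bytes } = await resizeAndConvert({
        inputBuffer,
        desiredFormat: 'webp', // any sharp-supported format, e.g. 'png' or 'jpeg'
        width: 150,
      });
      return { buffer, width, height, bytes };
    }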
@@ -1,6 +1,6 @@
|
||||
const path = require('path');
|
||||
const mime = require('mime');
|
||||
const { v4 } = require('uuid');
|
||||
const mime = require('mime/lite');
|
||||
const {
|
||||
isUUID,
|
||||
megabyte,
|
||||
@@ -9,17 +9,17 @@ const {
|
||||
imageExtRegex,
|
||||
EModelEndpoint,
|
||||
mergeFileConfig,
|
||||
hostImageIdSuffix,
|
||||
hostImageNamePrefix,
|
||||
} = require('librechat-data-provider');
|
||||
const { convertToWebP, resizeAndConvert } = require('~/server/services/Files/images');
|
||||
const { convertImage, resizeAndConvert } = require('~/server/services/Files/images');
|
||||
const { initializeClient } = require('~/server/services/Endpoints/assistants');
|
||||
const { createFile, updateFileUsage, deleteFiles } = require('~/models/File');
|
||||
const { isEnabled, determineFileType } = require('~/server/utils');
|
||||
const { LB_QueueAsyncCall } = require('~/server/utils/queue');
|
||||
const { getStrategyFunctions } = require('./strategies');
|
||||
const { determineFileType } = require('~/server/utils');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const { GPTS_DOWNLOAD_IMAGES = 'true' } = process.env;
|
||||
|
||||
const processFiles = async (files) => {
|
||||
const promises = [];
|
||||
for (let file of files) {
|
||||
@@ -207,7 +207,7 @@ const processImageFile = async ({ req, res, file, metadata }) => {
       filename: file.originalname,
       context: FileContext.message_attachment,
       source,
-      type: 'image/webp',
+      type: `image/${req.app.locals.imageOutputType}`,
       width,
       height,
     },
@@ -223,26 +223,37 @@ const processImageFile = async ({ req, res, file, metadata }) => {
  * @param {Object} params - The parameters object.
  * @param {Express.Request} params.req - The Express request object.
  * @param {FileContext} params.context - The context of the file (e.g., 'avatar', 'image_generation', etc.)
- * @returns {Promise<{ filepath: string, filename: string, source: string, type: 'image/webp'}>}
+ * @param {boolean} [params.resize=true] - Whether to resize and convert the image to target format. Default is `true`.
+ * @param {{ buffer: Buffer, width: number, height: number, bytes: number, filename: string, type: string, file_id: string }} [params.metadata] - Required metadata for the file if resize is false.
+ * @returns {Promise<{ filepath: string, filename: string, source: string, type: string}>}
  */
-const uploadImageBuffer = async ({ req, context }) => {
+const uploadImageBuffer = async ({ req, context, metadata = {}, resize = true }) => {
   const source = req.app.locals.fileStrategy;
   const { saveBuffer } = getStrategyFunctions(source);
-  const { buffer, width, height, bytes } = await resizeAndConvert(req.file.buffer);
-  const file_id = v4();
-  const fileName = `img-${file_id}.webp`;
+  let { buffer, width, height, bytes, filename, file_id, type } = metadata;
+  if (resize) {
+    file_id = v4();
+    type = `image/${req.app.locals.imageOutputType}`;
+    ({ buffer, width, height, bytes } = await resizeAndConvert({
+      inputBuffer: buffer,
+      desiredFormat: req.app.locals.imageOutputType,
+    }));
+    filename = `${path.basename(req.file.originalname, path.extname(req.file.originalname))}.${
+      req.app.locals.imageOutputType
+    }`;
+  }

-  const filepath = await saveBuffer({ userId: req.user.id, fileName, buffer });
+  const filepath = await saveBuffer({ userId: req.user.id, fileName: filename, buffer });
   return await createFile(
     {
       user: req.user.id,
       file_id,
       bytes,
       filepath,
-      filename: req.file.originalname,
+      filename,
       context,
       source,
-      type: 'image/webp',
+      type,
       width,
       height,
     },
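With the new `resize` flag, callers that already hold converted image data can skip the sharp pass entirely. A hedged sketch of both call styles (context values and metadata fields are illustrative, shown as comments since the call requires a live Express request):

    const { uploadImageBuffer } = require('~/server/services/Files/process');
    const { FileContext } = require('librechat-data-provider');

    // Default path: resizes req.file.buffer and converts to req.app.locals.imageOutputType
    // const result = await uploadImageBuffer({ req, context: FileContext.avatar });

    // Pre-processed path: supply full metadata and set resize: false
    // const result = await uploadImageBuffer({
    //   req,
    //   context: FileContext.image_generation,
    //   resize: false,
    //   metadata: { buffer, width, height, bytes, filename, type, file_id },
    // });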
@@ -293,9 +304,10 @@ const processFileUpload = async ({ req, res, file, metadata }) => {
       file_id: id ?? file_id,
       temp_file_id,
       bytes,
-      filepath: isAssistantUpload ? `${openai.baseURL}/files/${id}` : filepath,
       filename: filename ?? file.originalname,
+      filepath: isAssistantUpload ? `${openai.baseURL}/files/${id}` : filepath,
       context: isAssistantUpload ? FileContext.assistants : FileContext.message_attachment,
+      model: isAssistantUpload ? req.body.model : undefined,
       type: file.mimetype,
       embedded,
       source,
@@ -305,6 +317,96 @@ const processFileUpload = async ({ req, res, file, metadata }) => {
   res.status(200).json({ message: 'File uploaded and processed successfully', ...result });
 };

+/**
+ * @param {object} params - The params object.
+ * @param {OpenAI} params.openai - The OpenAI client instance.
+ * @param {string} params.file_id - The ID of the file to retrieve.
+ * @param {string} params.userId - The user ID.
+ * @param {string} [params.filename] - The name of the file. `undefined` for `file_citation` annotations.
+ * @param {boolean} [params.saveFile=false] - Whether to save the file metadata to the database.
+ * @param {boolean} [params.updateUsage=false] - Whether to update file usage in database.
+ */
+const processOpenAIFile = async ({
+  openai,
+  file_id,
+  userId,
+  filename,
+  saveFile = false,
+  updateUsage = false,
+}) => {
+  const _file = await openai.files.retrieve(file_id);
+  const originalName = filename ?? (_file.filename ? path.basename(_file.filename) : undefined);
+  const filepath = `${openai.baseURL}/files/${userId}/${file_id}${
+    originalName ? `/${originalName}` : ''
+  }`;
+  const type = mime.getType(originalName ?? file_id);
+
+  const file = {
+    ..._file,
+    type,
+    file_id,
+    filepath,
+    usage: 1,
+    user: userId,
+    context: _file.purpose,
+    source: FileSources.openai,
+    model: openai.req.body.model,
+    filename: originalName ?? file_id,
+  };
+
+  if (saveFile) {
+    await createFile(file, true);
+  } else if (updateUsage) {
+    try {
+      await updateFileUsage({ file_id });
+    } catch (error) {
+      logger.error('Error updating file usage', error);
+    }
+  }
+
+  return file;
+};
+
+/**
+ * Process OpenAI image files, convert to target format, save and return file metadata.
+ * @param {object} params - The params object.
+ * @param {Express.Request} params.req - The Express request object.
+ * @param {Buffer} params.buffer - The image buffer.
+ * @param {string} params.file_id - The file ID.
+ * @param {string} params.filename - The filename.
+ * @param {string} params.fileExt - The file extension.
+ * @returns {Promise<MongoFile>} The file metadata.
+ */
+const processOpenAIImageOutput = async ({ req, buffer, file_id, filename, fileExt }) => {
+  const currentDate = new Date();
+  const formattedDate = currentDate.toISOString();
+  const _file = await convertImage(req, buffer, 'high', `${file_id}${fileExt}`);
+  const file = {
+    ..._file,
+    usage: 1,
+    user: req.user.id,
+    type: `image/${req.app.locals.imageOutputType}`,
+    createdAt: formattedDate,
+    updatedAt: formattedDate,
+    source: req.app.locals.fileStrategy,
+    context: FileContext.assistants_output,
+    file_id: `${file_id}${hostImageIdSuffix}`,
+    filename: `${hostImageNamePrefix}${filename}`,
+  };
+  createFile(file, true);
+  createFile(
+    {
+      ...file,
+      file_id,
+      filename,
+      source: FileSources.openai,
+      type: mime.getType(fileExt),
+    },
+    true,
+  );
+  return file;
+};
+
 /**
  * Retrieves and processes an OpenAI file based on its type.
  *
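Worth noting: `processOpenAIImageOutput` persists two records per image, one for the host-converted copy and one pointing back to the OpenAI file. A small sketch of the resulting identifiers (the suffix/prefix constants come from librechat-data-provider; the file_id and filename here are examples):

    const { hostImageIdSuffix, hostImageNamePrefix } = require('librechat-data-provider');

    const file_id = 'file-abc123'; // example OpenAI file ID
    const filename = 'chart.png'; // example basename
    // Host copy, served via the app's configured file strategy:
    console.log(`${file_id}${hostImageIdSuffix}`, `${hostImageNamePrefix}${filename}`);
    // The second record keeps the original file_id/filename with source: FileSources.openai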
@@ -312,7 +414,7 @@ const processFileUpload = async ({ req, res, file, metadata }) => {
  * @param {OpenAIClient} params.openai - The OpenAI client instance.
  * @param {RunClient} params.client - The LibreChat client instance: either refers to `openai` or `streamRunManager`.
  * @param {string} params.file_id - The ID of the file to retrieve.
- * @param {string} params.basename - The basename of the file (if image); e.g., 'image.jpg'.
+ * @param {string} [params.basename] - The basename of the file (if image); e.g., 'image.jpg'. `undefined` for `file_citation` annotations.
+ * @param {boolean} [params.unknownType] - Whether the file type is unknown.
  * @returns {Promise<{file_id: string, filepath: string, source: string, bytes?: number, width?: number, height?: number} | null>}
  * - Returns null if `file_id` is not defined; else, the file metadata if successfully retrieved and processed.
@@ -328,107 +430,69 @@ async function retrieveAndProcessFile({
     return null;
   }

-  if (client.attachedFileIds?.has(file_id)) {
-    return {
-      file_id,
-      // filepath: TODO: local source filepath?,
-      source: FileSources.openai,
-    };
-  }
-
   let basename = _basename;
-  const downloadImages = isEnabled(GPTS_DOWNLOAD_IMAGES);
+  const processArgs = { openai, file_id, filename: basename, userId: client.req.user.id };

-  /**
-   * @param {string} file_id - The ID of the file to retrieve.
-   * @param {boolean} [save] - Whether to save the file metadata to the database.
-   */
-  const retrieveFile = async (file_id, save = false) => {
-    const _file = await openai.files.retrieve(file_id);
-    const filepath = `/api/files/download/${file_id}`;
-    const file = {
-      ..._file,
-      type: mime.getType(_file.filename),
-      filepath,
-      usage: 1,
-      file_id,
-      context: _file.purpose ?? FileContext.message_attachment,
-      source: FileSources.openai,
-    };
-
-    if (save) {
-      await createFile(file, true);
-    } else {
-      try {
-        await updateFileUsage({ file_id });
-      } catch (error) {
-        logger.error('Error updating file usage', error);
-      }
-    }
-
-    return file;
-  };
-
-  // If image downloads are not enabled or no basename provided, return only the file metadata
-  if (!downloadImages || (!basename && !downloadImages)) {
-    return await retrieveFile(file_id, true);
+  // If no basename provided, return only the file metadata
+  if (!basename) {
+    return await processOpenAIFile({ ...processArgs, saveFile: true });
   }

-  let data;
-  try {
+  const fileExt = path.extname(basename);
+  if (client.attachedFileIds?.has(file_id) || client.processedFileIds?.has(file_id)) {
+    return processOpenAIFile({ ...processArgs, updateUsage: true });
+  }
+
+  /**
+   * @returns {Promise<Buffer>} The file data buffer.
+   */
+  const getDataBuffer = async () => {
     const response = await openai.files.content(file_id);
-    data = await response.arrayBuffer();
-  } catch (error) {
-    logger.error('Error downloading file from OpenAI:', error);
-    return await retrieveFile(file_id);
-  }
-
-  if (!data) {
-    return await retrieveFile(file_id);
-  }
-  const dataBuffer = Buffer.from(data);
-
-  /**
-   * @param {Buffer} dataBuffer
-   * @param {string} fileExt
-   */
-  const processAsImage = async (dataBuffer, fileExt) => {
-    // Logic to process image files, convert to webp, etc.
-    const _file = await convertToWebP(client.req, dataBuffer, 'high', `${file_id}${fileExt}`);
-    const file = {
-      ..._file,
-      type: 'image/webp',
-      usage: 1,
-      file_id,
-      source: FileSources.openai,
-    };
-    createFile(file, true);
-    return file;
+    const arrayBuffer = await response.arrayBuffer();
+    return Buffer.from(arrayBuffer);
   };

-  /** @param {Buffer} dataBuffer */
-  const processOtherFileTypes = async (dataBuffer) => {
-    // Logic to handle other file types
-    logger.debug('[retrieveAndProcessFile] Non-image file type detected');
-    return { filepath: `/api/files/download/${file_id}`, bytes: dataBuffer.length };
-  };
+  let dataBuffer;
+  if (unknownType || !fileExt || imageExtRegex.test(basename)) {
+    try {
+      dataBuffer = await getDataBuffer();
+    } catch (error) {
+      logger.error('Error downloading file from OpenAI:', error);
+      dataBuffer = null;
+    }
+  }
+
+  if (!dataBuffer) {
+    return await processOpenAIFile({ ...processArgs, saveFile: true });
+  }

   // If the filetype is unknown, inspect the file
-  if (unknownType || !path.extname(basename)) {
+  if (dataBuffer && (unknownType || !fileExt)) {
     const detectedExt = await determineFileType(dataBuffer);
-    if (detectedExt && imageExtRegex.test('.' + detectedExt)) {
-      return await processAsImage(dataBuffer, detectedExt);
-    } else {
-      return await processOtherFileTypes(dataBuffer);
-    }
-  }
+    const isImageOutput = detectedExt && imageExtRegex.test('.' + detectedExt);

-  // Existing logic for processing known image types
-  if (downloadImages && basename && path.extname(basename) && imageExtRegex.test(basename)) {
-    return await processAsImage(dataBuffer, path.extname(basename));
+    if (!isImageOutput) {
+      return await processOpenAIFile({ ...processArgs, saveFile: true });
+    }
+
+    return await processOpenAIImageOutput({
+      file_id,
+      req: client.req,
+      buffer: dataBuffer,
+      filename: basename,
+      fileExt: detectedExt,
+    });
+  } else if (dataBuffer && imageExtRegex.test(basename)) {
+    return await processOpenAIImageOutput({
+      file_id,
+      req: client.req,
+      buffer: dataBuffer,
+      filename: basename,
+      fileExt,
+    });
   } else {
-    logger.debug('[retrieveAndProcessFile] Not an image or invalid extension: ', basename);
-    return await processOtherFileTypes(dataBuffer);
+    logger.debug(`[retrieveAndProcessFile] Non-image file type detected: ${basename}`);
+    return await processOpenAIFile({ ...processArgs, saveFile: true });
   }
 }
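Taken together, the refactor replaces the inline `retrieveFile`/`processAsImage` closures with the shared helpers above. A hedged usage sketch for fetching a code-interpreter image output (assumes `openai` and `client` from the assistants initialization; the file ID is an example):

    const { retrieveAndProcessFile } = require('~/server/services/Files/process');

    async function fetchOutputImage(openai, client) {
      // Downloads the content, converts the image via processOpenAIImageOutput,
      // and falls back to processOpenAIFile metadata for non-image types.
      return await retrieveAndProcessFile({
        openai,
        client,
        file_id: 'file-abc123', // example ID
        basename: 'file-abc123.png',
      });
    }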
@@ -7,6 +7,7 @@ const {
   saveBufferToFirebase,
   uploadImageToFirebase,
   processFirebaseAvatar,
+  getFirebaseFileStream,
 } = require('./Firebase');
 const {
   getLocalFileURL,
@@ -16,8 +17,9 @@ const {
   uploadLocalImage,
   prepareImagesLocal,
   processLocalAvatar,
+  getLocalFileStream,
 } = require('./Local');
-const { uploadOpenAIFile, deleteOpenAIFile } = require('./OpenAI');
+const { uploadOpenAIFile, deleteOpenAIFile, getOpenAIFileStream } = require('./OpenAI');
 const { uploadVectors, deleteVectors } = require('./VectorDB');

 /**
@@ -35,6 +37,7 @@ const firebaseStrategy = () => ({
   prepareImagePayload: prepareImageURL,
   processAvatar: processFirebaseAvatar,
   handleImageUpload: uploadImageToFirebase,
+  getDownloadStream: getFirebaseFileStream,
 });

 /**
@@ -51,6 +54,7 @@ const localStrategy = () => ({
   processAvatar: processLocalAvatar,
   handleImageUpload: uploadLocalImage,
   prepareImagePayload: prepareImagesLocal,
+  getDownloadStream: getLocalFileStream,
 });

 /**
@@ -70,6 +74,8 @@ const vectorStrategy = () => ({
   handleImageUpload: null,
   /** @type {typeof prepareImagesLocal | null} */
   prepareImagePayload: null,
+  /** @type {typeof getLocalFileStream | null} */
+  getDownloadStream: null,
   handleFileUpload: uploadVectors,
   deleteFile: deleteVectors,
 });
@@ -94,6 +100,7 @@ const openAIStrategy = () => ({
   prepareImagePayload: null,
   deleteFile: deleteOpenAIFile,
   handleFileUpload: uploadOpenAIFile,
+  getDownloadStream: getOpenAIFileStream,
 });

 // Strategy Selector
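Since every strategy now exposes `getDownloadStream` (or `null` where unsupported), a download route can stay storage-agnostic. A minimal sketch; the per-strategy stream signature is an assumption:

    const { getStrategyFunctions } = require('~/server/services/Files/strategies');

    async function streamFile(res, source, file_id) {
      const { getDownloadStream } = getStrategyFunctions(source);
      if (!getDownloadStream) {
        return res.status(501).send('Download not supported for this source');
      }
      const stream = await getDownloadStream(file_id); // signature assumed
      stream.pipe(res);
    }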
@@ -1,11 +1,8 @@
 const axios = require('axios');
 const { HttpsProxyAgent } = require('https-proxy-agent');
 const { EModelEndpoint, defaultModels, CacheKeys } = require('librechat-data-provider');
-const { extractBaseURL, inputSchema, processModelData } = require('~/utils');
+const { extractBaseURL, inputSchema, processModelData, logAxiosError } = require('~/utils');
 const getLogStores = require('~/cache/getLogStores');
-const { logger } = require('~/config');
-
-// const { getAzureCredentials, genAzureChatCompletion } = require('~/utils/');

 const { openAIApiKey, userProvidedOpenAI } = require('./Config/EndpointService').config;

@@ -77,29 +74,7 @@ const fetchModels = async ({
     models = input.data.map((item) => item.id);
   } catch (error) {
     const logMessage = `Failed to fetch models from ${azure ? 'Azure ' : ''}${name} API`;
-    if (error.response) {
-      logger.error(
-        `${logMessage} The request was made and the server responded with a status code that falls out of the range of 2xx: ${
-          error.message ? error.message : ''
-        }`,
-        {
-          headers: error.response.headers,
-          status: error.response.status,
-          data: error.response.data,
-        },
-      );
-    } else if (error.request) {
-      logger.error(
-        `${logMessage} The request was made but no response was received: ${
-          error.message ? error.message : ''
-        }`,
-        {
-          request: error.request,
-        },
-      );
-    } else {
-      logger.error(`${logMessage} Something happened in setting up the request`, error);
-    }
+    logAxiosError({ message: logMessage, error });
   }

   return models;
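The three-branch axios error logging is now centralized in `logAxiosError`, which takes the same `{ message, error }` shape everywhere it is used in this diff. A hedged sketch of the pattern (the URL is an example):

    const axios = require('axios');
    const { logAxiosError } = require('~/utils');

    async function fetchExampleModels() {
      try {
        const { data } = await axios.get('https://api.example.com/v1/models'); // example URL
        return data;
      } catch (error) {
        // logAxiosError distinguishes response / request / setup failures internally
        logAxiosError({ message: 'Failed to fetch models from example API', error });
        return [];
      }
    }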
@@ -1,4 +1,3 @@
-const path = require('path');
 const {
   StepTypes,
   ContentTypes,
@@ -102,18 +101,20 @@ class StreamRunManager {
    * @returns {Promise<void>}
    */
   async addContentData(data) {
-    const { type, index } = data;
-    this.finalMessage.content[index] = { type, [type]: data[type] };
+    const { type, index, edited } = data;
+    /** @type {ContentPart} */
+    const contentPart = data[type];
+    this.finalMessage.content[index] = { type, [type]: contentPart };

-    if (type === ContentTypes.TEXT) {
-      this.text += data[type].value;
+    if (type === ContentTypes.TEXT && !edited) {
+      this.text += contentPart.value;
       return;
     }

     const contentData = {
       index,
       type,
-      [type]: data[type],
+      [type]: contentPart,
       thread_id: this.thread_id,
       messageId: this.finalMessage.messageId,
       conversationId: this.finalMessage.conversationId,
@@ -220,14 +221,9 @@ class StreamRunManager {
       file_id,
       basename: `${file_id}.png`,
     });
-    // toolCall.asset_pointer = file.filepath;
-    const prelimImage = {
-      file_id,
-      filename: path.basename(file.filepath),
-      filepath: file.filepath,
-      height: file.height,
-      width: file.width,
-    };
+
+    const prelimImage = file;

     // check if every key has a value before adding to content
     const prelimImageKeys = Object.keys(prelimImage);
     const validImageFile = prelimImageKeys.every((key) => prelimImage[key]);
@@ -593,7 +589,7 @@ class StreamRunManager {
    */
   async handleMessageEvent(event) {
     if (event.event === AssistantStreamEvents.ThreadMessageCompleted) {
-      this.messageCompleted(event);
+      await this.messageCompleted(event);
     }
   }

@@ -613,6 +609,7 @@ class StreamRunManager {
     this.addContentData({
       [ContentTypes.TEXT]: { value: result.text },
       type: ContentTypes.TEXT,
+      edited: result.edited,
       index,
     });
     this.messages.push(message);
@@ -1,6 +1,6 @@
 const axios = require('axios');
 const { EModelEndpoint } = require('librechat-data-provider');
-const { logger } = require('~/config');
+const { logAxiosError } = require('~/utils');

 /**
  * @typedef {Object} RetrieveOptions
@@ -54,33 +54,8 @@ async function retrieveRun({ thread_id, run_id, timeout, openai }) {
     const response = await axios.get(url, axiosConfig);
     return response.data;
   } catch (error) {
-    const logMessage = '[retrieveRun] Failed to retrieve run data:';
-    const timedOutMessage = 'Cannot read properties of undefined (reading \'status\')';
-    if (error?.response && error?.response?.status) {
-      logger.error(
-        `${logMessage} The request was made and the server responded with a status code that falls out of the range of 2xx: ${
-          error.message ? error.message : ''
-        }`,
-        {
-          headers: error.response.headers,
-          status: error.response.status,
-          data: error.response.data,
-        },
-      );
-    } else if (error.request) {
-      logger.error(
-        `${logMessage} The request was made but no response was received: ${
-          error.message ? error.message : ''
-        }`,
-        {
-          request: error.request,
-        },
-      );
-    } else if (error?.message && !error?.message?.includes(timedOutMessage)) {
-      logger.error(`${logMessage} Something happened in setting up the request`, {
-        message: error.message,
-      });
-    }
+    const message = '[retrieveRun] Failed to retrieve run data:';
+    logAxiosError({ message, error });
     throw error;
   }
 }
@@ -2,10 +2,9 @@ const path = require('path');
 const { v4 } = require('uuid');
 const {
   Constants,
-  FilePurpose,
   ContentTypes,
-  imageExtRegex,
   EModelEndpoint,
+  AnnotationTypes,
   defaultOrderQuery,
 } = require('librechat-data-provider');
 const { retrieveAndProcessFile } = require('~/server/services/Files/process');
@@ -434,13 +433,15 @@ async function checkMessageGaps({ openai, latestMessageId, thread_id, run_id, co
   }

   let addedCurrentMessage = false;
-  const apiMessages = response.data.map((msg) => {
-    if (msg.id === currentMessage.id) {
-      addedCurrentMessage = true;
-      return currentMessage;
-    }
-    return msg;
-  });
+  const apiMessages = response.data
+    .map((msg) => {
+      if (msg.id === currentMessage.id) {
+        addedCurrentMessage = true;
+        return currentMessage;
+      }
+      return msg;
+    })
+    .sort((a, b) => new Date(a.created_at) - new Date(b.created_at));

   if (!addedCurrentMessage) {
     apiMessages.push(currentMessage);
@@ -496,6 +497,44 @@ const recordUsage = async ({
   );
 };

+/**
+ * Safely replaces the annotated text within the specified range denoted by start_index and end_index,
+ * after verifying that the text within that range matches the given annotation text.
+ * Proceeds with the replacement even if a mismatch is found, but logs a warning.
+ *
+ * @param {string} originalText The original text content.
+ * @param {number} start_index The starting index where replacement should begin.
+ * @param {number} end_index The ending index where replacement should end.
+ * @param {string} expectedText The text expected to be found in the specified range.
+ * @param {string} replacementText The text to insert in place of the existing content.
+ * @returns {string} The text with the replacement applied, regardless of text match.
+ */
+function replaceAnnotation(originalText, start_index, end_index, expectedText, replacementText) {
+  if (start_index < 0 || end_index > originalText.length || start_index > end_index) {
+    logger.warn(`Invalid range specified for annotation replacement.
+    Attempting replacement with \`replace\` method instead...
+    length: ${originalText.length}
+    start_index: ${start_index}
+    end_index: ${end_index}`);
+    return originalText.replace(originalText, replacementText);
+  }
+
+  const actualTextInRange = originalText.substring(start_index, end_index);
+
+  if (actualTextInRange !== expectedText) {
+    logger.warn(`The text within the specified range does not match the expected annotation text.
+    Attempting replacement with \`replace\` method instead...
+    Expected: ${expectedText}
+    Actual: ${actualTextInRange}`);
+
+    return originalText.replace(originalText, replacementText);
+  }
+
+  const beforeText = originalText.substring(0, start_index);
+  const afterText = originalText.substring(end_index);
+  return beforeText + replacementText + afterText;
+}
+
 /**
  * Sorts, processes, and flattens messages to a single string.
  *
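A worked example of the index-based replacement (values are illustrative): `start_index` 9 and `end_index` 35 bound exactly the annotated sandbox path, so only that span is swapped:

    // originalText:     'Download sandbox:/mnt/data/plot.png now'
    // annotation.text:  'sandbox:/mnt/data/plot.png' (indices 9..35)
    const result = replaceAnnotation(
      'Download sandbox:/mnt/data/plot.png now',
      9,
      35,
      'sandbox:/mnt/data/plot.png',
      '/api/files/download/file-abc123',
    );
    // => 'Download /api/files/download/file-abc123 now'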
@@ -509,89 +548,101 @@ async function processMessages({ openai, client, messages = [] }) {
   const sorted = messages.sort((a, b) => a.created_at - b.created_at);

   let text = '';
+  let edited = false;
+  const sources = [];
   for (const message of sorted) {
     message.files = [];
     for (const content of message.content) {
-      const processImageFile =
-        content.type === 'image_file' && !client.processedFileIds.has(content.image_file?.file_id);
-      if (processImageFile) {
-        const { file_id } = content.image_file;
+      const type = content.type;
+      const contentType = content[type];
+      const currentFileId = contentType?.file_id;

+      if (type === ContentTypes.IMAGE_FILE && !client.processedFileIds.has(currentFileId)) {
         const file = await retrieveAndProcessFile({
           openai,
           client,
-          file_id,
-          basename: `${file_id}.png`,
+          file_id: currentFileId,
+          basename: `${currentFileId}.png`,
         });
-        client.processedFileIds.add(file_id);
+
+        client.processedFileIds.add(currentFileId);
         message.files.push(file);
         continue;
       }

-      text += (content.text?.value ?? '') + ' ';
-      logger.debug('[processMessages] Processing message:', { value: text });
+      let currentText = contentType?.value ?? '';
+
+      /** @type {{ annotations: Annotation[] }} */
+      const { annotations } = contentType ?? {};

       // Process annotations if they exist
-      if (!content.text?.annotations?.length) {
+      if (!annotations?.length) {
+        text += currentText + ' ';
         continue;
       }

-      logger.debug('[processMessages] Processing annotations:', content.text.annotations);
-      for (const annotation of content.text.annotations) {
-        logger.debug('Current annotation:', annotation);
+      logger.debug('[processMessages] Processing annotations:', annotations);
+      for (const annotation of annotations) {
         let file;
-        const processFilePath =
-          annotation.file_path && !client.processedFileIds.has(annotation.file_path?.file_id);
+        const type = annotation.type;
+        const annotationType = annotation[type];
+        const file_id = annotationType?.file_id;
+        const alreadyProcessed = client.processedFileIds.has(file_id);

-        if (processFilePath) {
-          const basename = imageExtRegex.test(annotation.text)
-            ? path.basename(annotation.text)
-            : null;
+        const replaceCurrentAnnotation = (replacement = '') => {
+          currentText = replaceAnnotation(
+            currentText,
+            annotation.start_index,
+            annotation.end_index,
+            annotation.text,
+            replacement,
+          );
+          edited = true;
+        };
+
+        if (alreadyProcessed) {
+          const { file_id } = annotationType || {};
+          file = await retrieveAndProcessFile({ openai, client, file_id, unknownType: true });
+        } else if (type === AnnotationTypes.FILE_PATH) {
+          const basename = path.basename(annotation.text);
           file = await retrieveAndProcessFile({
             openai,
             client,
-            file_id: annotation.file_path.file_id,
+            file_id,
             basename,
           });
-          client.processedFileIds.add(annotation.file_path.file_id);
-        }
-
-        const processFileCitation =
-          annotation.file_citation &&
-          !client.processedFileIds.has(annotation.file_citation?.file_id);
-
-        if (processFileCitation) {
+          replaceCurrentAnnotation(file.filepath);
+        } else if (type === AnnotationTypes.FILE_CITATION) {
           file = await retrieveAndProcessFile({
             openai,
             client,
-            file_id: annotation.file_citation.file_id,
+            file_id,
             unknownType: true,
           });
-          client.processedFileIds.add(annotation.file_citation.file_id);
+          sources.push(file.filename);
+          replaceCurrentAnnotation(`^${sources.length}^`);
         }

-        if (!file && (annotation.file_path || annotation.file_citation)) {
-          const { file_id } = annotation.file_citation || annotation.file_path || {};
-          file = await retrieveAndProcessFile({ openai, client, file_id, unknownType: true });
-          client.processedFileIds.add(file_id);
-        }
+        text += currentText + ' ';

         if (!file) {
           continue;
         }

-        if (file.purpose && file.purpose === FilePurpose.Assistants) {
-          text = text.replace(annotation.text, file.filename);
-        } else if (file.filepath) {
-          text = text.replace(annotation.text, file.filepath);
-        }
-
+        client.processedFileIds.add(file_id);
         message.files.push(file);
       }
     }
   }

-  return { messages: sorted, text };
+  if (sources.length) {
+    text += '\n\n';
+    for (let i = 0; i < sources.length; i++) {
+      text += `^${i + 1}.^ ${sources[i]}${i === sources.length - 1 ? '' : '\n'}`;
+    }
+  }
+
+  return { messages: sorted, text, edited };
 }

 module.exports = {
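For file citations this produces inline `^n^` markers plus a numbered footer. A short sketch of the output shape, with example filenames:

    const sources = ['report.pdf', 'data.csv']; // example citation filenames
    let text = 'The figures are consistent ^1^ with last quarter ^2^.';
    if (sources.length) {
      text += '\n\n';
      for (let i = 0; i < sources.length; i++) {
        text += `^${i + 1}.^ ${sources[i]}${i === sources.length - 1 ? '' : '\n'}`;
      }
    }
    // text now ends with:
    // ^1.^ report.pdf
    // ^2.^ data.csv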
@@ -12,8 +12,8 @@ const {
   openapiToFunction,
   validateAndParseOpenAPISpec,
 } = require('librechat-data-provider');
+const { processFileURL, uploadImageBuffer } = require('~/server/services/Files/process');
 const { loadActionSets, createActionTool, domainParser } = require('./ActionService');
-const { processFileURL } = require('~/server/services/Files/process');
 const { recordUsage } = require('~/server/services/Threads');
 const { loadTools } = require('~/app/clients/tools/util');
 const { redactMessage } = require('~/config/parsers');
@@ -147,7 +147,7 @@ const processVisionRequest = async (client, currentAction) => {

 /**
  * Processes return required actions from run.
- * @param {OpenAIClient} client - OpenAI or StreamRunManager Client.
+ * @param {OpenAIClient | StreamRunManager} client - OpenAI (legacy) or StreamRunManager Client.
  * @param {RequiredAction[]} requiredActions - The required actions to submit outputs for.
  * @returns {Promise<ToolOutputs>} The outputs of the tools.
  */
@@ -164,6 +164,8 @@ async function processRequiredActions(client, requiredActions) {
     functions: true,
     options: {
       processFileURL,
+      req: client.req,
+      uploadImageBuffer,
       openAIApiKey: client.apiKey,
       fileStrategy: client.req.app.locals.fileStrategy,
       returnMetadata: true,
@@ -268,14 +270,20 @@ async function processRequiredActions(client, requiredActions) {
       if (!actionSets.length) {
         actionSets =
           (await loadActionSets({
             user: client.req.user.id,
             assistant_id: client.req.body.assistant_id,
           })) ?? [];
       }

-      const actionSet = actionSets.find((action) =>
-        currentAction.tool.includes(domainParser(client.req, action.metadata.domain, true)),
-      );
+      let actionSet = null;
+      let currentDomain = '';
+      for (let action of actionSets) {
+        const domain = await domainParser(client.req, action.metadata.domain, true);
+        if (currentAction.tool.includes(domain)) {
+          currentDomain = domain;
+          actionSet = action;
+          break;
+        }
+      }

       if (!actionSet) {
         // TODO: try `function` if no action set is found
@@ -297,10 +305,8 @@ async function processRequiredActions(client, requiredActions) {
         builders = requestBuilders;
       }

-      const functionName = currentAction.tool.replace(
-        `${actionDelimiter}${domainParser(client.req, actionSet.metadata.domain, true)}`,
-        '',
-      );
+      const functionName = currentAction.tool.replace(`${actionDelimiter}${currentDomain}`, '');

       const requestBuilder = builders[functionName];

       if (!requestBuilder) {
@@ -1,7 +1,19 @@
-const { User, Key } = require('~/models');
+const { ErrorTypes } = require('librechat-data-provider');
 const { encrypt, decrypt } = require('~/server/utils');
+const { User, Key } = require('~/models');
 const { logger } = require('~/config');

+/**
+ * Updates the plugins for a user based on the action specified (install/uninstall).
+ * @async
+ * @param {Object} user - The user whose plugins are to be updated.
+ * @param {string} pluginKey - The key of the plugin to install or uninstall.
+ * @param {'install' | 'uninstall'} action - The action to perform, 'install' or 'uninstall'.
+ * @returns {Promise<Object>} The result of the update operation.
+ * @throws Logs the error internally if the update operation fails.
+ * @description This function updates the plugin array of a user document based on the specified action.
+ * It adds a plugin key to the plugins array for an 'install' action, and removes it for an 'uninstall' action.
+ */
 const updateUserPluginsService = async (user, pluginKey, action) => {
   try {
     if (action === 'install') {
@@ -21,14 +33,64 @@ const updateUserPluginsService = async (user, pluginKey, action) => {
   }
 };

+/**
+ * Retrieves and decrypts the key value for a given user identified by userId and identifier name.
+ * @param {Object} params - The parameters object.
+ * @param {string} params.userId - The unique identifier for the user.
+ * @param {string} params.name - The name associated with the key.
+ * @returns {Promise<string>} The decrypted key value.
+ * @throws {Error} Throws an error if the key is not found or if there is a problem during key retrieval.
+ * @description This function searches for a user's key in the database using their userId and name.
+ * If found, it decrypts the value of the key and returns it. If no key is found, it throws
+ * an error indicating that there is no user key available.
+ */
 const getUserKey = async ({ userId, name }) => {
   const keyValue = await Key.findOne({ userId, name }).lean();
   if (!keyValue) {
-    throw new Error('User-provided key not found');
+    throw new Error(
+      JSON.stringify({
+        type: ErrorTypes.NO_USER_KEY,
+      }),
+    );
   }
   return decrypt(keyValue.value);
 };

+/**
+ * Retrieves, decrypts, and parses the key values for a given user identified by userId and name.
+ * @param {Object} params - The parameters object.
+ * @param {string} params.userId - The unique identifier for the user.
+ * @param {string} params.name - The name associated with the key.
+ * @returns {Promise<Record<string,string>>} The decrypted and parsed key values.
+ * @throws {Error} Throws an error if the key is invalid or if there is a problem during key value parsing.
+ * @description This function retrieves a user's encrypted key using their userId and name, decrypts it,
+ * and then attempts to parse the decrypted string into a JSON object. If the parsing fails,
+ * it throws an error indicating that the user key is invalid.
+ */
+const getUserKeyValues = async ({ userId, name }) => {
+  let userValues = await getUserKey({ userId, name });
+  try {
+    userValues = JSON.parse(userValues);
+  } catch (e) {
+    throw new Error(
+      JSON.stringify({
+        type: ErrorTypes.INVALID_USER_KEY,
+      }),
+    );
+  }
+  return userValues;
+};
+
+/**
+ * Retrieves the expiry information of a user's key identified by userId and name.
+ * @async
+ * @param {Object} params - The parameters object.
+ * @param {string} params.userId - The unique identifier for the user.
+ * @param {string} params.name - The name associated with the key.
+ * @returns {Promise<{expiresAt: Date | null}>} The expiry date of the key or null if the key doesn't exist.
+ * @description This function fetches a user's key from the database using their userId and name and
+ * returns its expiry date. If the key is not found, it returns null for the expiry date.
+ */
 const getUserKeyExpiry = async ({ userId, name }) => {
   const keyValue = await Key.findOne({ userId, name }).lean();
   if (!keyValue) {
@@ -37,6 +99,18 @@ const getUserKeyExpiry = async ({ userId, name }) => {
   return { expiresAt: keyValue.expiresAt };
 };

+/**
+ * Updates or inserts a new key for a given user identified by userId and name, with a specified value and expiry date.
+ * @async
+ * @param {Object} params - The parameters object.
+ * @param {string} params.userId - The unique identifier for the user.
+ * @param {string} params.name - The name associated with the key.
+ * @param {string} params.value - The value to be encrypted and stored as the key's value.
+ * @param {Date} params.expiresAt - The expiry date for the key.
+ * @returns {Promise<Object>} The updated or newly inserted key document.
+ * @description This function either updates an existing user key or inserts a new one into the database,
+ * after encrypting the provided value. It sets the provided expiry date for the key.
+ */
 const updateUserKey = async ({ userId, name, value, expiresAt }) => {
   const encryptedValue = encrypt(value);
   return await Key.findOneAndUpdate(
@@ -51,6 +125,18 @@ const updateUserKey = async ({ userId, name, value, expiresAt }) => {
   ).lean();
 };

+/**
+ * Deletes a key or all keys for a given user identified by userId, optionally based on a specified name.
+ * @async
+ * @param {Object} params - The parameters object.
+ * @param {string} params.userId - The unique identifier for the user.
+ * @param {string} [params.name] - The name associated with the key to delete. If not provided and all is true, deletes all keys.
+ * @param {boolean} [params.all=false] - Whether to delete all keys for the user.
+ * @returns {Promise<Object>} The result of the deletion operation.
+ * @description This function deletes a specific key or all keys for a user from the database.
+ * If a name is provided and all is false, it deletes only the key with that name.
+ * If all is true, it ignores the name and deletes all keys for the user.
+ */
 const deleteUserKey = async ({ userId, name, all = false }) => {
   if (all) {
     return await Key.deleteMany({ userId });
@@ -59,11 +145,23 @@ const deleteUserKey = async ({ userId, name, all = false }) => {
   await Key.findOneAndDelete({ userId, name }).lean();
 };

-const checkUserKeyExpiry = (expiresAt, message) => {
+/**
+ * Checks if a user key has expired based on the provided expiration date and endpoint.
+ * If the key has expired, it throws an Error with details including the type of error, the expiration date, and the endpoint.
+ *
+ * @param {string} expiresAt - The expiration date of the user key in a format that can be parsed by the Date constructor.
+ * @param {string} endpoint - The endpoint associated with the user key to be checked.
+ * @throws {Error} Throws an error if the user key has expired. The error message is a stringified JSON object
+ * containing the type of error (`ErrorTypes.EXPIRED_USER_KEY`), the expiration date in the local string format, and the endpoint.
+ */
+const checkUserKeyExpiry = (expiresAt, endpoint) => {
   const expiresAtDate = new Date(expiresAt);
   if (expiresAtDate < new Date()) {
-    const expiryStr = `User-provided key expired at ${expiresAtDate.toLocaleString()}`;
-    const errorMessage = message ? `${message}\n${expiryStr}` : expiryStr;
+    const errorMessage = JSON.stringify({
+      type: ErrorTypes.EXPIRED_USER_KEY,
+      expiredAt: expiresAtDate.toLocaleString(),
+      endpoint,
+    });
     throw new Error(errorMessage);
   }
 };
@@ -71,6 +169,7 @@ const checkUserKeyExpiry = (expiresAt, message) => {
 module.exports = {
   updateUserPluginsService,
   getUserKey,
+  getUserKeyValues,
   getUserKeyExpiry,
   updateUserKey,
   deleteUserKey,
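Since the key errors are now stringified JSON payloads, consumers can branch on `type`. A hedged sketch (the module path and the `'openAI'` key name are assumptions; the parse only applies to errors thrown by this service):

    const { ErrorTypes } = require('librechat-data-provider');
    const {
      getUserKeyValues,
      getUserKeyExpiry,
      checkUserKeyExpiry,
    } = require('~/server/services/UserService'); // assumed path

    async function loadUserEndpointKey(userId) {
      try {
        const { expiresAt } = await getUserKeyExpiry({ userId, name: 'openAI' });
        checkUserKeyExpiry(expiresAt, 'openAI'); // throws EXPIRED_USER_KEY JSON if stale
        return await getUserKeyValues({ userId, name: 'openAI' });
      } catch (err) {
        const payload = JSON.parse(err.message); // { type: NO_USER_KEY | INVALID_USER_KEY | EXPIRED_USER_KEY, ... }
        if (payload.type === ErrorTypes.EXPIRED_USER_KEY) {
          // e.g., prompt the user to re-enter their key
        }
        throw err;
      }
    }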
46 api/server/services/start/assistants.js (new file)
@@ -0,0 +1,46 @@
+const {
+  Capabilities,
+  EModelEndpoint,
+  assistantEndpointSchema,
+} = require('librechat-data-provider');
+const { logger } = require('~/config');
+
+/**
+ * Sets up the minimum, default Assistants configuration if Azure OpenAI Assistants option is enabled.
+ * @returns {Partial<TAssistantEndpoint>} The Assistants endpoint configuration.
+ */
+function azureAssistantsDefaults() {
+  return {
+    capabilities: [Capabilities.tools, Capabilities.actions, Capabilities.code_interpreter],
+  };
+}
+
+/**
+ * Sets up the Assistants configuration from the config (`librechat.yaml`) file.
+ * @param {TCustomConfig} config - The loaded custom configuration.
+ * @param {Partial<TAssistantEndpoint>} [prevConfig]
+ * - The previously loaded assistants configuration from Azure OpenAI Assistants option.
+ * @returns {Partial<TAssistantEndpoint>} The Assistants endpoint configuration.
+ */
+function assistantsConfigSetup(config, prevConfig = {}) {
+  const assistantsConfig = config.endpoints[EModelEndpoint.assistants];
+  const parsedConfig = assistantEndpointSchema.parse(assistantsConfig);
+  if (assistantsConfig.supportedIds?.length && assistantsConfig.excludedIds?.length) {
+    logger.warn(
+      `Both \`supportedIds\` and \`excludedIds\` are defined for the ${EModelEndpoint.assistants} endpoint; \`excludedIds\` field will be ignored.`,
+    );
+  }
+
+  return {
+    ...prevConfig,
+    retrievalModels: parsedConfig.retrievalModels,
+    disableBuilder: parsedConfig.disableBuilder,
+    pollIntervalMs: parsedConfig.pollIntervalMs,
+    supportedIds: parsedConfig.supportedIds,
+    capabilities: parsedConfig.capabilities,
+    excludedIds: parsedConfig.excludedIds,
+    timeoutMs: parsedConfig.timeoutMs,
+  };
+}
+
+module.exports = { azureAssistantsDefaults, assistantsConfigSetup };
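A hedged sketch of wiring these helpers at startup; `config` stands in for the parsed `librechat.yaml`, and the exact call site is an assumption:

    const {
      azureAssistantsDefaults,
      assistantsConfigSetup,
    } = require('~/server/services/start/assistants');

    // Start from Azure's minimal defaults when its `assistants` option is on,
    // then let the yaml's assistants endpoint refine them.
    let assistantsConfig = azureAssistantsDefaults();
    if (config.endpoints?.assistants) {
      assistantsConfig = assistantsConfigSetup(config, assistantsConfig);
    }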
54 api/server/services/start/azureOpenAI.js (new file)
@@ -0,0 +1,54 @@
+const {
+  EModelEndpoint,
+  validateAzureGroups,
+  mapModelToAzureConfig,
+} = require('librechat-data-provider');
+const { logger } = require('~/config');
+
+/**
+ * Sets up the Azure OpenAI configuration from the config (`librechat.yaml`) file.
+ * @param {TCustomConfig} config - The loaded custom configuration.
+ * @returns {TAzureConfig} The Azure OpenAI configuration.
+ */
+function azureConfigSetup(config) {
+  const { groups, ...azureConfiguration } = config.endpoints[EModelEndpoint.azureOpenAI];
+  /** @type {TAzureConfigValidationResult} */
+  const { isValid, modelNames, modelGroupMap, groupMap, errors } = validateAzureGroups(groups);
+
+  if (!isValid) {
+    const errorString = errors.join('\n');
+    const errorMessage = 'Invalid Azure OpenAI configuration:\n' + errorString;
+    logger.error(errorMessage);
+    throw new Error(errorMessage);
+  }
+
+  const assistantModels = [];
+  const assistantGroups = new Set();
+  for (const modelName of modelNames) {
+    mapModelToAzureConfig({ modelName, modelGroupMap, groupMap });
+    const groupName = modelGroupMap?.[modelName]?.group;
+    const modelGroup = groupMap?.[groupName];
+    let supportsAssistants = modelGroup?.assistants || modelGroup?.[modelName]?.assistants;
+    if (supportsAssistants) {
+      assistantModels.push(modelName);
+      !assistantGroups.has(groupName) && assistantGroups.add(groupName);
+    }
+  }
+
+  if (azureConfiguration.assistants && assistantModels.length === 0) {
+    throw new Error(
+      'No Azure models are configured to support assistants. Please remove the `assistants` field or configure at least one model to support assistants.',
+    );
+  }
+
+  return {
+    modelNames,
+    modelGroupMap,
+    groupMap,
+    assistantModels,
+    assistantGroups: Array.from(assistantGroups),
+    ...azureConfiguration,
+  };
+}
+
+module.exports = { azureConfigSetup };
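And its Azure counterpart, sketched with a minimal groups payload; the field names follow the Azure groups schema in librechat-data-provider as best understood here, and every value is illustrative:

    const { azureConfigSetup } = require('~/server/services/start/azureOpenAI');

    const config = {
      endpoints: {
        azureOpenAI: {
          assistants: true,
          groups: [
            {
              group: 'example-group', // illustrative
              apiKey: '${EXAMPLE_AZURE_KEY}',
              instanceName: 'example-instance',
              version: '2024-02-15-preview',
              assistants: true,
              models: { 'gpt-4-turbo': { deploymentName: 'gpt-4-turbo' } },
            },
          ],
        },
      },
    };

    const azureConfig = azureConfigSetup(config); // throws if the groups are invalid
    // azureConfig.modelNames -> ['gpt-4-turbo']; assistantModels lists assistant-capable models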
Some files were not shown because too many files have changed in this diff.