Compare commits
83 Commits
cleanup/co...feat/granu
| SHA1 |
|---|
| 95214c43c2 |
| f8c8b89f4d |
| 5512c55d71 |
| e07067c86d |
| 2bd1eb3cca |
| 57084f2caa |
| 669af746ed |
| f9994d1547 |
| c62a23fafc |
| 6bbefcd16e |
| 1016a33b89 |
| c9aa10d3d5 |
| 5979efd607 |
| 25b97ba388 |
| e97444a863 |
| 717f61d878 |
| dbb234d3bf |
| 718af1ada1 |
| ee4cdaed20 |
| f897420586 |
| 004f128aec |
| c07a342c97 |
| fd64e2380e |
| 3f82aed9eb |
| 0143ae5728 |
| eed43e6662 |
| fa54c9ae90 |
| 97085073d2 |
| 299c484c7a |
| d53cd1f391 |
| a5e8d009a1 |
| 3e4b01de82 |
| 8b15bb2ed6 |
| c7e4523d7c |
| dba0ec4320 |
| 3c9357580e |
| 10c0d7d474 |
| ec7370dfe9 |
| b412455e9d |
| 3af2666890 |
| 0103b4b08a |
| 5eb0703f78 |
| 4419e2c294 |
| 5f2d1c5dc9 |
| 46ff008b07 |
| 55f79bd2d1 |
| 1bd874591a |
| 6488873bad |
| 13c7ceb918 |
| cdf42b3a03 |
| c2a18f61b4 |
| a57224c1d5 |
| 118ad943c9 |
| 272522452a |
| b0054c775a |
| 9bb9aba8ec |
| 293ac02b95 |
| 29ef91b4dd |
| cd7dd576c1 |
| c22d74d41e |
| 2c39ccd2af |
| 53df6a1a71 |
| dff4fcac00 |
| be4cf5846c |
| 6bb78247b3 |
| cbddd394a5 |
| 830be18b90 |
| 32bab33499 |
| 1806b70418 |
| 5ccdb83e5e |
| 8cade2120d |
| c7f2ee36c5 |
| f2f4bf87ca |
| 442b149d55 |
| aca89091d9 |
| 37c94beeac |
| 80bc49db8d |
| d3a504857a |
| 09e3500d39 |
| 8458401ce6 |
| f9d40784f0 |
| a2fc7d312a |
| 4cbab86b45 |
**.env.example** (27 changes)

```diff
@@ -485,6 +485,21 @@ SAML_IMAGE_URL=
 # SAML_USE_AUTHN_RESPONSE_SIGNED=
 
+#===============================================#
+#  Microsoft Graph API / Entra ID Integration   #
+#===============================================#
+
+# Enable Entra ID people search integration in permissions/sharing system
+# When enabled, the people picker will search both local database and Entra ID
+USE_ENTRA_ID_FOR_PEOPLE_SEARCH=false
+
+# When enabled, entra id groups owners will be considered as members of the group
+ENTRA_ID_INCLUDE_OWNERS_AS_MEMBERS=false
+
+# Microsoft Graph API scopes needed for people/group search
+# Default scopes provide access to user profiles and group memberships
+OPENID_GRAPH_SCOPES=User.Read,People.Read,GroupMember.Read.All
+
 # LDAP
 LDAP_URL=
 LDAP_BIND_DN=
@@ -515,6 +530,18 @@ EMAIL_PASSWORD=
 EMAIL_FROM_NAME=
 EMAIL_FROM=noreply@librechat.ai
+
+#========================#
+#      Mailgun API       #
+#========================#
+
+# MAILGUN_API_KEY=your-mailgun-api-key
+# MAILGUN_DOMAIN=mg.yourdomain.com
+# EMAIL_FROM=noreply@yourdomain.com
+# EMAIL_FROM_NAME="LibreChat"
+
+# # Optional: For EU region
+# MAILGUN_HOST=https://api.eu.mailgun.net
 
 #========================#
 #      Firebase CDN      #
 #========================#
```
**.github/CONTRIBUTING.md** (4 changes, vendored)

```diff
@@ -30,8 +30,8 @@ Project maintainers have the right and responsibility to remove, edit, or reject
 2. Install typescript globally: `npm i -g typescript`.
 3. Run `npm ci` to install dependencies.
 4. Build the data provider: `npm run build:data-provider`.
-5. Build MCP: `npm run build:mcp`.
-6. Build data schemas: `npm run build:data-schemas`.
+5. Build data schemas: `npm run build:data-schemas`.
+6. Build API methods: `npm run build:api`.
 7. Setup and run unit tests:
    - Copy `.env.test`: `cp api/test/.env.test.example api/test/.env.test`.
    - Run backend unit tests: `npm run test:api`.
```
**.github/workflows/backend-review.yml** (14 changes, vendored)

```diff
@@ -7,6 +7,7 @@ on:
       - release/*
     paths:
       - 'api/**'
+      - 'packages/api/**'
 jobs:
   tests_Backend:
     name: Run Backend unit tests
@@ -36,12 +37,12 @@ jobs:
       - name: Install Data Provider Package
        run: npm run build:data-provider
 
-      - name: Install MCP Package
-        run: npm run build:mcp
-
       - name: Install Data Schemas Package
         run: npm run build:data-schemas
 
+      - name: Install API Package
+        run: npm run build:api
+
       - name: Create empty auth.json file
         run: |
           mkdir -p api/data
@@ -66,5 +67,8 @@ jobs:
       - name: Run librechat-data-provider unit tests
         run: cd packages/data-provider && npm run test:ci
 
-      - name: Run librechat-mcp unit tests
-        run: cd packages/mcp && npm run test:ci
+      - name: Run @librechat/data-schemas unit tests
+        run: cd packages/data-schemas && npm run test:ci
+
+      - name: Run @librechat/api unit tests
+        run: cd packages/api && npm run test:ci
```
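Note: the backend CI no longer builds or tests the removed `packages/mcp` workspace; `packages/data-schemas` and the new `packages/api` package are built and tested instead, matching the contributor steps updated in `CONTRIBUTING.md` above.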
**.github/workflows/deploy-dev.yml** (17 changes, vendored)

```diff
@@ -2,7 +2,7 @@ name: Update Test Server
 
 on:
   workflow_run:
-    workflows: ["Docker Dev Images Build"]
+    workflows: ["Docker Dev Branch Images Build"]
     types:
       - completed
   workflow_dispatch:
@@ -12,7 +12,8 @@ jobs:
     runs-on: ubuntu-latest
     if: |
       github.repository == 'danny-avila/LibreChat' &&
-      (github.event_name == 'workflow_dispatch' || github.event.workflow_run.conclusion == 'success')
+      (github.event_name == 'workflow_dispatch' ||
+      (github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.head_branch == 'dev'))
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
@@ -29,13 +30,17 @@ jobs:
           DO_USER: ${{ secrets.DO_USER }}
         run: |
           ssh -o StrictHostKeyChecking=no ${DO_USER}@${DO_HOST} << EOF
-          sudo -i -u danny bash << EEOF
+          sudo -i -u danny bash << 'EEOF'
           cd ~/LibreChat && \
           git fetch origin main && \
-          npm run update:deployed && \
+          sudo npm run stop:deployed && \
+          sudo docker images --format "{{.Repository}}:{{.ID}}" | grep -E "lc-dev|librechat" | cut -d: -f2 | xargs -r sudo docker rmi -f || true && \
+          sudo npm run update:deployed && \
+          git checkout dev && \
+          git pull origin dev && \
           git checkout do-deploy && \
-          git rebase main && \
-          npm run start:deployed && \
+          git rebase dev && \
+          sudo npm run start:deployed && \
           echo "Update completed. Application should be running now."
           EEOF
           EOF
```
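Note: quoting the inner heredoc delimiter (`<< 'EEOF'`) stops the outer shells from expanding variables and command substitutions before the script reaches the deploy host, so the `docker images ... | xargs ... docker rmi` pipeline now evaluates remotely as written. The job is also gated to `workflow_run` successes coming from the `dev` branch.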
**.github/workflows/dev-branch-images.yml** (72 changes, vendored, new file)

```diff
@@ -0,0 +1,72 @@
+name: Docker Dev Branch Images Build
+
+on:
+  workflow_dispatch:
+  push:
+    branches:
+      - dev
+    paths:
+      - 'api/**'
+      - 'client/**'
+      - 'packages/**'
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        include:
+          - target: api-build
+            file: Dockerfile.multi
+            image_name: lc-dev-api
+          - target: node
+            file: Dockerfile
+            image_name: lc-dev
+
+    steps:
+      # Check out the repository
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      # Set up QEMU
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+
+      # Set up Docker Buildx
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      # Log in to GitHub Container Registry
+      - name: Log in to GitHub Container Registry
+        uses: docker/login-action@v2
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      # Login to Docker Hub
+      - name: Login to Docker Hub
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      # Prepare the environment
+      - name: Prepare environment
+        run: |
+          cp .env.example .env
+
+      # Build and push Docker images for each target
+      - name: Build and push Docker images
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          file: ${{ matrix.file }}
+          push: true
+          tags: |
+            ghcr.io/${{ github.repository_owner }}/${{ matrix.image_name }}:${{ github.sha }}
+            ghcr.io/${{ github.repository_owner }}/${{ matrix.image_name }}:latest
+            ${{ secrets.DOCKERHUB_USERNAME }}/${{ matrix.image_name }}:${{ github.sha }}
+            ${{ secrets.DOCKERHUB_USERNAME }}/${{ matrix.image_name }}:latest
+          platforms: linux/amd64,linux/arm64
+          target: ${{ matrix.target }}
```
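Note: the matrix builds two images per push to `dev`: `lc-dev-api` from the `api-build` target of `Dockerfile.multi`, and `lc-dev` from the `node` target of `Dockerfile`; each is pushed to both GHCR and Docker Hub under a commit-SHA tag and `latest`, for `linux/amd64` and `linux/arm64`.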
**.github/workflows/i18n-unused-keys.yml** (3 changes, vendored)

```diff
@@ -5,12 +5,13 @@ on:
     paths:
       - "client/src/**"
       - "api/**"
+      - "packages/data-provider/src/**"
 
 jobs:
   detect-unused-i18n-keys:
     runs-on: ubuntu-latest
     permissions:
-      pull-requests: write # Required for posting PR comments
+      pull-requests: write
     steps:
       - name: Checkout repository
         uses: actions/checkout@v3
```
**.github/workflows/unused-packages.yml** (2 changes, vendored)

```diff
@@ -98,6 +98,8 @@ jobs:
           cd client
           UNUSED=$(depcheck --json | jq -r '.dependencies | join("\n")' || echo "")
           UNUSED=$(comm -23 <(echo "$UNUSED" | sort) <(cat ../client_used_deps.txt ../client_used_code.txt | sort) || echo "")
+          # Filter out false positives
+          UNUSED=$(echo "$UNUSED" | grep -v "^micromark-extension-llm-math$" || echo "")
           echo "CLIENT_UNUSED<<EOF" >> $GITHUB_ENV
           echo "$UNUSED" >> $GITHUB_ENV
           echo "EOF" >> $GITHUB_ENV
```
**.gitignore** (1 change, vendored)

```diff
@@ -55,6 +55,7 @@ bower_components/
 # AI
 .clineignore
 .cursor
+.aider*
 
 # Floobits
 .floo
```
**.vscode/launch.json** (3 changes, vendored)

```diff
@@ -8,7 +8,8 @@
       "skipFiles": ["<node_internals>/**"],
       "program": "${workspaceFolder}/api/server/index.js",
       "env": {
-        "NODE_ENV": "production"
+        "NODE_ENV": "production",
+        "NODE_TLS_REJECT_UNAUTHORIZED": "0"
       },
       "console": "integratedTerminal",
       "envFile": "${workspaceFolder}/.env"
```
**Dockerfile.multi**

```diff
@@ -14,7 +14,7 @@ RUN npm config set fetch-retry-maxtimeout 600000 && \
     npm config set fetch-retry-mintimeout 15000
 COPY package*.json ./
 COPY packages/data-provider/package*.json ./packages/data-provider/
-COPY packages/mcp/package*.json ./packages/mcp/
+COPY packages/api/package*.json ./packages/api/
 COPY packages/data-schemas/package*.json ./packages/data-schemas/
 COPY client/package*.json ./client/
 COPY api/package*.json ./api/
@@ -24,26 +24,27 @@ FROM base-min AS base
 WORKDIR /app
 RUN npm ci
 
-# Build data-provider
+# Build `data-provider` package
 FROM base AS data-provider-build
 WORKDIR /app/packages/data-provider
 COPY packages/data-provider ./
 RUN npm run build
 
-# Build mcp package
-FROM base AS mcp-build
-WORKDIR /app/packages/mcp
-COPY packages/mcp ./
-COPY --from=data-provider-build /app/packages/data-provider/dist /app/packages/data-provider/dist
-RUN npm run build
-
-# Build data-schemas
+# Build `data-schemas` package
 FROM base AS data-schemas-build
 WORKDIR /app/packages/data-schemas
 COPY packages/data-schemas ./
 COPY --from=data-provider-build /app/packages/data-provider/dist /app/packages/data-provider/dist
 RUN npm run build
 
+# Build `api` package
+FROM base AS api-package-build
+WORKDIR /app/packages/api
+COPY packages/api ./
+COPY --from=data-provider-build /app/packages/data-provider/dist /app/packages/data-provider/dist
+COPY --from=data-schemas-build /app/packages/data-schemas/dist /app/packages/data-schemas/dist
+RUN npm run build
+
 # Client build
 FROM base AS client-build
 WORKDIR /app/client
@@ -63,8 +64,8 @@ RUN npm ci --omit=dev
 COPY api ./api
 COPY config ./config
 COPY --from=data-provider-build /app/packages/data-provider/dist ./packages/data-provider/dist
-COPY --from=mcp-build /app/packages/mcp/dist ./packages/mcp/dist
 COPY --from=data-schemas-build /app/packages/data-schemas/dist ./packages/data-schemas/dist
+COPY --from=api-package-build /app/packages/api/dist ./packages/api/dist
 COPY --from=client-build /app/client/dist ./client/dist
 WORKDIR /app/api
 EXPOSE 3080
```
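Note: the removed `mcp-build` stage is replaced by an `api-package-build` stage that depends on both earlier package builds: `data-provider` builds first, `data-schemas` consumes its `dist`, and `packages/api` consumes both, with the final image copying in all three `dist` outputs.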
**README.md**

```diff
@@ -150,8 +150,8 @@ Click on the thumbnail to open the video☝️
 
 **Other:**
 - **Website:** [librechat.ai](https://librechat.ai)
-- **Documentation:** [docs.librechat.ai](https://docs.librechat.ai)
-- **Blog:** [blog.librechat.ai](https://blog.librechat.ai)
+- **Documentation:** [librechat.ai/docs](https://librechat.ai/docs)
+- **Blog:** [librechat.ai/blog](https://librechat.ai/blog)
 
 ---
 
```
**AnthropicClient.js**

```diff
@@ -10,6 +10,7 @@ const {
   validateVisionModel,
 } = require('librechat-data-provider');
 const { SplitStreamHandler: _Handler } = require('@librechat/agents');
+const { Tokenizer, createFetch, createStreamEventHandlers } = require('@librechat/api');
 const {
   truncateText,
   formatMessage,
@@ -26,8 +27,6 @@ const {
 const { getModelMaxTokens, getModelMaxOutputTokens, matchModelName } = require('~/utils');
 const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
 const { encodeAndFormat } = require('~/server/services/Files/images/encode');
-const { createFetch, createStreamEventHandlers } = require('./generators');
-const Tokenizer = require('~/server/services/Tokenizer');
 const { sleep } = require('~/server/utils');
 const BaseClient = require('./BaseClient');
 const { logger } = require('~/config');
@@ -191,10 +190,11 @@ class AnthropicClient extends BaseClient {
         reverseProxyUrl: this.options.reverseProxyUrl,
       }),
       apiKey: this.apiKey,
+      fetchOptions: {},
     };
 
     if (this.options.proxy) {
-      options.httpAgent = new HttpsProxyAgent(this.options.proxy);
+      options.fetchOptions.agent = new HttpsProxyAgent(this.options.proxy);
     }
 
     if (this.options.reverseProxyUrl) {
```
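The proxy wiring above moves from an SDK-specific `httpAgent` option to a generic `fetchOptions.agent`. A minimal sketch of the pattern, assuming an SDK client whose constructor accepts a `fetchOptions` object as the diff shows for the Anthropic client (the helper name and usage below are illustrative):

```js
const { HttpsProxyAgent } = require('https-proxy-agent');

/** Builds client options, attaching a proxy agent at the fetch layer when configured. */
function buildClientOptions({ apiKey, proxy }) {
  const options = { apiKey, fetchOptions: {} };
  if (proxy) {
    // The agent rides along with every fetch call the SDK makes,
    // instead of being a transport option the SDK must special-case.
    options.fetchOptions.agent = new HttpsProxyAgent(proxy);
  }
  return options;
}

// Hypothetical usage:
// const client = new Anthropic(buildClientOptions({ apiKey, proxy: 'http://127.0.0.1:8080' }));
```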
**ChatGPTClient.js**

```diff
@@ -2,6 +2,7 @@ const { Keyv } = require('keyv');
 const crypto = require('crypto');
 const { CohereClient } = require('cohere-ai');
 const { fetchEventSource } = require('@waylaidwanderer/fetch-event-source');
+const { constructAzureURL, genAzureChatCompletion } = require('@librechat/api');
 const { encoding_for_model: encodingForModel, get_encoding: getEncoding } = require('tiktoken');
 const {
   ImageDetail,
@@ -10,9 +11,9 @@ const {
   CohereConstants,
   mapModelToAzureConfig,
 } = require('librechat-data-provider');
-const { extractBaseURL, constructAzureURL, genAzureChatCompletion } = require('~/utils');
 const { createContextHandlers } = require('./prompts');
 const { createCoherePayload } = require('./llm');
+const { extractBaseURL } = require('~/utils');
 const BaseClient = require('./BaseClient');
 const { logger } = require('~/config');
@@ -244,9 +245,9 @@ class ChatGPTClient extends BaseClient {
 
     baseURL = this.langchainProxy
       ? constructAzureURL({
           baseURL: this.langchainProxy,
           azureOptions: this.azure,
         })
       : this.azureEndpoint.split(/(?<!\/)\/(chat|completion)\//)[0];
 
     if (this.options.forcePrompt) {
@@ -339,7 +340,6 @@ class ChatGPTClient extends BaseClient {
     opts.body = JSON.stringify(modelOptions);
 
     if (modelOptions.stream) {
-
       return new Promise(async (resolve, reject) => {
         try {
           let done = false;
```
**GoogleClient.js**

```diff
@@ -1,4 +1,5 @@
 const { google } = require('googleapis');
+const { Tokenizer } = require('@librechat/api');
 const { concat } = require('@langchain/core/utils/stream');
 const { ChatVertexAI } = require('@langchain/google-vertexai');
 const { ChatGoogleGenerativeAI } = require('@langchain/google-genai');
@@ -19,7 +20,6 @@ const {
 } = require('librechat-data-provider');
 const { getSafetySettings } = require('~/server/services/Endpoints/google/llm');
 const { encodeAndFormat } = require('~/server/services/Files/images');
-const Tokenizer = require('~/server/services/Tokenizer');
 const { spendTokens } = require('~/models/spendTokens');
 const { getModelMaxTokens } = require('~/utils');
 const { sleep } = require('~/server/utils');
@@ -34,7 +34,8 @@ const BaseClient = require('./BaseClient');
 
 const loc = process.env.GOOGLE_LOC || 'us-central1';
 const publisher = 'google';
-const endpointPrefix = `${loc}-aiplatform.googleapis.com`;
+const endpointPrefix =
+  loc === 'global' ? 'aiplatform.googleapis.com' : `${loc}-aiplatform.googleapis.com`;
 
 const settings = endpointSettings[EModelEndpoint.google];
 const EXCLUDED_GENAI_MODELS = /gemini-(?:1\.0|1-0|pro)/;
@@ -236,11 +237,11 @@ class GoogleClient extends BaseClient {
     msg.content = (
       !Array.isArray(msg.content)
         ? [
             {
               type: ContentTypes.TEXT,
               [ContentTypes.TEXT]: msg.content,
             },
           ]
         : msg.content
     ).concat(message.image_urls);
 
```
**OllamaClient.js**

```diff
@@ -1,10 +1,11 @@
 const { z } = require('zod');
 const axios = require('axios');
 const { Ollama } = require('ollama');
+const { sleep } = require('@librechat/agents');
+const { logAxiosError } = require('@librechat/api');
+const { logger } = require('@librechat/data-schemas');
 const { Constants } = require('librechat-data-provider');
-const { deriveBaseURL, logAxiosError } = require('~/utils');
-const { sleep } = require('~/server/utils');
-const { logger } = require('~/config');
+const { deriveBaseURL } = require('~/utils');
 
 const ollamaPayloadSchema = z.object({
   mirostat: z.number().optional(),
@@ -67,7 +68,7 @@ class OllamaClient {
       return models;
     } catch (error) {
       const logMessage =
-        'Failed to fetch models from Ollama API. If you are not using Ollama directly, and instead, through some aggregator or reverse proxy that handles fetching via OpenAI spec, ensure the name of the endpoint doesn\'t start with `ollama` (case-insensitive).';
+        "Failed to fetch models from Ollama API. If you are not using Ollama directly, and instead, through some aggregator or reverse proxy that handles fetching via OpenAI spec, ensure the name of the endpoint doesn't start with `ollama` (case-insensitive).";
       logAxiosError({ message: logMessage, error });
       return [];
     }
```
**OpenAIClient.js**

```diff
@@ -1,6 +1,14 @@
 const { OllamaClient } = require('./OllamaClient');
 const { HttpsProxyAgent } = require('https-proxy-agent');
 const { SplitStreamHandler, CustomOpenAIClient: OpenAI } = require('@librechat/agents');
+const {
+  isEnabled,
+  Tokenizer,
+  createFetch,
+  constructAzureURL,
+  genAzureChatCompletion,
+  createStreamEventHandlers,
+} = require('@librechat/api');
 const {
   Constants,
   ImageDetail,
@@ -16,13 +24,6 @@ const {
   validateVisionModel,
   mapModelToAzureConfig,
 } = require('librechat-data-provider');
-const {
-  extractBaseURL,
-  constructAzureURL,
-  getModelMaxTokens,
-  genAzureChatCompletion,
-  getModelMaxOutputTokens,
-} = require('~/utils');
 const {
   truncateText,
   formatMessage,
@@ -30,10 +31,9 @@ const {
   titleInstruction,
   createContextHandlers,
 } = require('./prompts');
+const { extractBaseURL, getModelMaxTokens, getModelMaxOutputTokens } = require('~/utils');
 const { encodeAndFormat } = require('~/server/services/Files/images/encode');
-const { createFetch, createStreamEventHandlers } = require('./generators');
-const { addSpaceIfNeeded, isEnabled, sleep } = require('~/server/utils');
-const Tokenizer = require('~/server/services/Tokenizer');
+const { addSpaceIfNeeded, sleep } = require('~/server/utils');
 const { spendTokens } = require('~/models/spendTokens');
 const { handleOpenAIErrors } = require('./tools/util');
 const { createLLM, RunManager } = require('./llm');
@@ -1159,6 +1159,7 @@ ${convo}
     logger.debug('[OpenAIClient] chatCompletion', { baseURL, modelOptions });
     const opts = {
       baseURL,
+      fetchOptions: {},
     };
 
     if (this.useOpenRouter) {
@@ -1177,7 +1178,7 @@ ${convo}
     }
 
     if (this.options.proxy) {
-      opts.httpAgent = new HttpsProxyAgent(this.options.proxy);
+      opts.fetchOptions.agent = new HttpsProxyAgent(this.options.proxy);
     }
 
     /** @type {TAzureConfig | undefined} */
@@ -1395,7 +1396,7 @@ ${convo}
       ...modelOptions,
       stream: true,
     };
-    const stream = await openai.beta.chat.completions
+    const stream = await openai.chat.completions
       .stream(params)
       .on('abort', () => {
         /* Do nothing here */
```
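The last hunk moves the streaming helper off the `beta` namespace. A sketch of the event-based flow, assuming an `openai`-style client whose `chat.completions.stream()` returns a chainable emitter with a final-completion promise (the `content` event and `finalChatCompletion()` are assumptions about that helper API, not shown in the diff):

```js
/** Streams a chat completion, printing content deltas as they arrive. */
async function streamChat(openai, modelOptions) {
  const params = { ...modelOptions, stream: true };
  const stream = openai.chat.completions
    .stream(params)
    .on('abort', () => {
      /* Do nothing here, mirroring the client above */
    })
    .on('content', (delta) => process.stdout.write(delta));
  // Resolves once the stream has been fully consumed.
  return await stream.finalChatCompletion();
}
```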
**generators.js** (deleted file)

```diff
@@ -1,71 +0,0 @@
-const fetch = require('node-fetch');
-const { GraphEvents } = require('@librechat/agents');
-const { logger, sendEvent } = require('~/config');
-const { sleep } = require('~/server/utils');
-
-/**
- * Makes a function to make HTTP request and logs the process.
- * @param {Object} params
- * @param {boolean} [params.directEndpoint] - Whether to use a direct endpoint.
- * @param {string} [params.reverseProxyUrl] - The reverse proxy URL to use for the request.
- * @returns {Promise<Response>} - A promise that resolves to the response of the fetch request.
- */
-function createFetch({ directEndpoint = false, reverseProxyUrl = '' }) {
-  /**
-   * Makes an HTTP request and logs the process.
-   * @param {RequestInfo} url - The URL to make the request to. Can be a string or a Request object.
-   * @param {RequestInit} [init] - Optional init options for the request.
-   * @returns {Promise<Response>} - A promise that resolves to the response of the fetch request.
-   */
-  return async (_url, init) => {
-    let url = _url;
-    if (directEndpoint) {
-      url = reverseProxyUrl;
-    }
-    logger.debug(`Making request to ${url}`);
-    if (typeof Bun !== 'undefined') {
-      return await fetch(url, init);
-    }
-    return await fetch(url, init);
-  };
-}
-
-// Add this at the module level outside the class
-/**
- * Creates event handlers for stream events that don't capture client references
- * @param {Object} res - The response object to send events to
- * @returns {Object} Object containing handler functions
- */
-function createStreamEventHandlers(res) {
-  return {
-    [GraphEvents.ON_RUN_STEP]: (event) => {
-      if (res) {
-        sendEvent(res, event);
-      }
-    },
-    [GraphEvents.ON_MESSAGE_DELTA]: (event) => {
-      if (res) {
-        sendEvent(res, event);
-      }
-    },
-    [GraphEvents.ON_REASONING_DELTA]: (event) => {
-      if (res) {
-        sendEvent(res, event);
-      }
-    },
-  };
-}
-
-function createHandleLLMNewToken(streamRate) {
-  return async () => {
-    if (streamRate) {
-      await sleep(streamRate);
-    }
-  };
-}
-
-module.exports = {
-  createFetch,
-  createHandleLLMNewToken,
-  createStreamEventHandlers,
-};
```
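With the local module gone, the same helpers come from the shared `@librechat/api` package, as the client diffs above show. A minimal usage sketch, assuming the package versions keep the call shapes of the deleted source (the URL below is illustrative):

```js
const { createFetch, createStreamEventHandlers } = require('@librechat/api');

// A fetch wrapper that pins every request to a reverse proxy when
// `directEndpoint` is set, matching the deleted implementation above.
const customFetch = createFetch({
  directEndpoint: true,
  reverseProxyUrl: 'https://proxy.example.com/v1', // illustrative URL
});

// Handlers keyed by GraphEvents that forward run-step, message, and
// reasoning deltas to an Express response during streaming.
function makeHandlers(res) {
  return createStreamEventHandlers(res);
}

module.exports = { customFetch, makeHandlers };
```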
**createLLM (llm helper)**

```diff
@@ -1,6 +1,5 @@
 const { ChatOpenAI } = require('@langchain/openai');
-const { sanitizeModelName, constructAzureURL } = require('~/utils');
-const { isEnabled } = require('~/server/utils');
+const { isEnabled, sanitizeModelName, constructAzureURL } = require('@librechat/api');
 
 /**
  * Creates a new instance of a language model (LLM) for chat interactions.
```
**AnthropicClient tests**

```diff
@@ -309,7 +309,7 @@ describe('AnthropicClient', () => {
     };
     client.setOptions({ modelOptions, promptCache: true });
     const anthropicClient = client.getClient(modelOptions);
-    expect(anthropicClient.defaultHeaders).not.toHaveProperty('anthropic-beta');
+    expect(anthropicClient._options.defaultHeaders).toBeUndefined();
   });
 
   it('should not add beta header for other models', () => {
@@ -320,7 +320,7 @@ describe('AnthropicClient', () => {
       },
     });
     const anthropicClient = client.getClient();
-    expect(anthropicClient.defaultHeaders).not.toHaveProperty('anthropic-beta');
+    expect(anthropicClient._options.defaultHeaders).toBeUndefined();
   });
 });
```
**BaseClient tests**

```diff
@@ -1,7 +1,7 @@
 const { Constants } = require('librechat-data-provider');
 const { initializeFakeClient } = require('./FakeClient');
 
-jest.mock('~/lib/db/connectDb');
+jest.mock('~/db/connect');
 jest.mock('~/models', () => ({
   User: jest.fn(),
   Key: jest.fn(),
@@ -33,7 +33,9 @@ jest.mock('~/models', () => ({
 const { getConvo, saveConvo } = require('~/models');
 
 jest.mock('@librechat/agents', () => {
+  const { Providers } = jest.requireActual('@librechat/agents');
   return {
+    Providers,
     ChatOpenAI: jest.fn().mockImplementation(() => {
       return {};
     }),
@@ -52,7 +54,7 @@ const messageHistory = [
   {
     role: 'user',
     isCreatedByUser: true,
-    text: 'What\'s up',
+    text: "What's up",
     messageId: '3',
     parentMessageId: '2',
   },
@@ -456,7 +458,7 @@ describe('BaseClient', () => {
 
     const chatMessages2 = await TestClient.loadHistory(conversationId, '3');
     expect(TestClient.currentMessages).toHaveLength(3);
-    expect(chatMessages2[chatMessages2.length - 1].text).toEqual('What\'s up');
+    expect(chatMessages2[chatMessages2.length - 1].text).toEqual("What's up");
   });
 
   /* Most of the new sendMessage logic revolving around edited/continued AI messages
```
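Note: the `jest.requireActual` call turns the `@librechat/agents` mock into a partial mock: the real `Providers` enum is re-exported while `ChatOpenAI` stays stubbed, so test code that branches on provider values keeps working against the real constants.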
**OpenAIClient tests**

```diff
@@ -5,7 +5,7 @@ const getLogStores = require('~/cache/getLogStores');
 const OpenAIClient = require('../OpenAIClient');
 jest.mock('meilisearch');
 
-jest.mock('~/lib/db/connectDb');
+jest.mock('~/db/connect');
 jest.mock('~/models', () => ({
   User: jest.fn(),
   Key: jest.fn(),
@@ -462,17 +462,17 @@ describe('OpenAIClient', () => {
         role: 'system',
         name: 'example_user',
         content:
-          'Let\'s circle back when we have more bandwidth to touch base on opportunities for increased leverage.',
+          "Let's circle back when we have more bandwidth to touch base on opportunities for increased leverage.",
       },
       {
         role: 'system',
         name: 'example_assistant',
-        content: 'Let\'s talk later when we\'re less busy about how to do better.',
+        content: "Let's talk later when we're less busy about how to do better.",
       },
       {
         role: 'user',
         content:
-          'This late pivot means we don\'t have time to boil the ocean for the client deliverable.',
+          "This late pivot means we don't have time to boil the ocean for the client deliverable.",
       },
     ];
 
```
**PluginsClient tests**

```diff
@@ -3,7 +3,7 @@ const { Constants } = require('librechat-data-provider');
 const { HumanMessage, AIMessage } = require('@langchain/core/messages');
 const PluginsClient = require('../PluginsClient');
 
-jest.mock('~/lib/db/connectDb');
+jest.mock('~/db/connect');
 jest.mock('~/models/Conversation', () => {
   return function () {
     return {
```
**OpenAPIPlugin.js** (deleted file)

```diff
@@ -1,184 +0,0 @@
-require('dotenv').config();
-const fs = require('fs');
-const { z } = require('zod');
-const path = require('path');
-const yaml = require('js-yaml');
-const { createOpenAPIChain } = require('langchain/chains');
-const { DynamicStructuredTool } = require('@langchain/core/tools');
-const { ChatPromptTemplate, HumanMessagePromptTemplate } = require('@langchain/core/prompts');
-const { logger } = require('~/config');
-
-function addLinePrefix(text, prefix = '// ') {
-  return text
-    .split('\n')
-    .map((line) => prefix + line)
-    .join('\n');
-}
-
-function createPrompt(name, functions) {
-  const prefix = `// The ${name} tool has the following functions. Determine the desired or most optimal function for the user's query:`;
-  const functionDescriptions = functions
-    .map((func) => `// - ${func.name}: ${func.description}`)
-    .join('\n');
-  return `${prefix}\n${functionDescriptions}
-// You are an expert manager and scrum master. You must provide a detailed intent to better execute the function.
-// Always format as such: {{"func": "function_name", "intent": "intent and expected result"}}`;
-}
-
-const AuthBearer = z
-  .object({
-    type: z.string().includes('service_http'),
-    authorization_type: z.string().includes('bearer'),
-    verification_tokens: z.object({
-      openai: z.string(),
-    }),
-  })
-  .catch(() => false);
-
-const AuthDefinition = z
-  .object({
-    type: z.string(),
-    authorization_type: z.string(),
-    verification_tokens: z.object({
-      openai: z.string(),
-    }),
-  })
-  .catch(() => false);
-
-async function readSpecFile(filePath) {
-  try {
-    const fileContents = await fs.promises.readFile(filePath, 'utf8');
-    if (path.extname(filePath) === '.json') {
-      return JSON.parse(fileContents);
-    }
-    return yaml.load(fileContents);
-  } catch (e) {
-    logger.error('[readSpecFile] error', e);
-    return false;
-  }
-}
-
-async function getSpec(url) {
-  const RegularUrl = z
-    .string()
-    .url()
-    .catch(() => false);
-
-  if (RegularUrl.parse(url) && path.extname(url) === '.json') {
-    const response = await fetch(url);
-    return await response.json();
-  }
-
-  const ValidSpecPath = z
-    .string()
-    .url()
-    .catch(async () => {
-      const spec = path.join(__dirname, '..', '.well-known', 'openapi', url);
-      if (!fs.existsSync(spec)) {
-        return false;
-      }
-
-      return await readSpecFile(spec);
-    });
-
-  return ValidSpecPath.parse(url);
-}
-
-async function createOpenAPIPlugin({ data, llm, user, message, memory, signal }) {
-  let spec;
-  try {
-    spec = await getSpec(data.api.url);
-  } catch (error) {
-    logger.error('[createOpenAPIPlugin] getSpec error', error);
-    return null;
-  }
-
-  if (!spec) {
-    logger.warn('[createOpenAPIPlugin] No spec found');
-    return null;
-  }
-
-  const headers = {};
-  const { auth, name_for_model, description_for_model, description_for_human } = data;
-  if (auth && AuthDefinition.parse(auth)) {
-    logger.debug('[createOpenAPIPlugin] auth detected', auth);
-    const { openai } = auth.verification_tokens;
-    if (AuthBearer.parse(auth)) {
-      headers.authorization = `Bearer ${openai}`;
-      logger.debug('[createOpenAPIPlugin] added auth bearer', headers);
-    }
-  }
-
-  const chainOptions = { llm };
-
-  if (data.headers && data.headers['librechat_user_id']) {
-    logger.debug('[createOpenAPIPlugin] id detected', headers);
-    headers[data.headers['librechat_user_id']] = user;
-  }
-
-  if (Object.keys(headers).length > 0) {
-    logger.debug('[createOpenAPIPlugin] headers detected', headers);
-    chainOptions.headers = headers;
-  }
-
-  if (data.params) {
-    logger.debug('[createOpenAPIPlugin] params detected', data.params);
-    chainOptions.params = data.params;
-  }
-
-  let history = '';
-  if (memory) {
-    logger.debug('[createOpenAPIPlugin] openAPI chain: memory detected', memory);
-    const { history: chat_history } = await memory.loadMemoryVariables({});
-    history = chat_history?.length > 0 ? `\n\n## Chat History:\n${chat_history}\n` : '';
-  }
-
-  chainOptions.prompt = ChatPromptTemplate.fromMessages([
-    HumanMessagePromptTemplate.fromTemplate(
-      `# Use the provided API's to respond to this query:\n\n{query}\n\n## Instructions:\n${addLinePrefix(
-        description_for_model,
-      )}${history}`,
-    ),
-  ]);
-
-  const chain = await createOpenAPIChain(spec, chainOptions);
-
-  const { functions } = chain.chains[0].lc_kwargs.llmKwargs;
-
-  return new DynamicStructuredTool({
-    name: name_for_model,
-    description_for_model: `${addLinePrefix(description_for_human)}${createPrompt(
-      name_for_model,
-      functions,
-    )}`,
-    description: `${description_for_human}`,
-    schema: z.object({
-      func: z
-        .string()
-        .describe(
-          `The function to invoke. The functions available are: ${functions
-            .map((func) => func.name)
-            .join(', ')}`,
-        ),
-      intent: z
-        .string()
-        .describe('Describe your intent with the function and your expected result'),
-    }),
-    func: async ({ func = '', intent = '' }) => {
-      const filteredFunctions = functions.filter((f) => f.name === func);
-      chain.chains[0].lc_kwargs.llmKwargs.functions = filteredFunctions;
-      const query = `${message}${func?.length > 0 ? `\n// Intent: ${intent}` : ''}`;
-      const result = await chain.call({
-        query,
-        signal,
-      });
-      return result.response;
-    },
-  });
-}
-
-module.exports = {
-  getSpec,
-  readSpecFile,
-  createOpenAPIPlugin,
-};
```
**OpenAPIPlugin tests** (deleted file)

```diff
@@ -1,72 +0,0 @@
-const fs = require('fs');
-const { createOpenAPIPlugin, getSpec, readSpecFile } = require('./OpenAPIPlugin');
-
-global.fetch = jest.fn().mockImplementationOnce(() => {
-  return new Promise((resolve) => {
-    resolve({
-      ok: true,
-      json: () => Promise.resolve({ key: 'value' }),
-    });
-  });
-});
-jest.mock('fs', () => ({
-  promises: {
-    readFile: jest.fn(),
-  },
-  existsSync: jest.fn(),
-}));
-
-describe('readSpecFile', () => {
-  it('reads JSON file correctly', async () => {
-    fs.promises.readFile.mockResolvedValue(JSON.stringify({ test: 'value' }));
-    const result = await readSpecFile('test.json');
-    expect(result).toEqual({ test: 'value' });
-  });
-
-  it('reads YAML file correctly', async () => {
-    fs.promises.readFile.mockResolvedValue('test: value');
-    const result = await readSpecFile('test.yaml');
-    expect(result).toEqual({ test: 'value' });
-  });
-
-  it('handles error correctly', async () => {
-    fs.promises.readFile.mockRejectedValue(new Error('test error'));
-    const result = await readSpecFile('test.json');
-    expect(result).toBe(false);
-  });
-});
-
-describe('getSpec', () => {
-  it('fetches spec from url correctly', async () => {
-    const parsedJson = await getSpec('https://www.instacart.com/.well-known/ai-plugin.json');
-    const isObject = typeof parsedJson === 'object';
-    expect(isObject).toEqual(true);
-  });
-
-  it('reads spec from file correctly', async () => {
-    fs.existsSync.mockReturnValue(true);
-    fs.promises.readFile.mockResolvedValue(JSON.stringify({ test: 'value' }));
-    const result = await getSpec('test.json');
-    expect(result).toEqual({ test: 'value' });
-  });
-
-  it('returns false when file does not exist', async () => {
-    fs.existsSync.mockReturnValue(false);
-    const result = await getSpec('test.json');
-    expect(result).toBe(false);
-  });
-});
-
-describe('createOpenAPIPlugin', () => {
-  it('returns null when getSpec throws an error', async () => {
-    const result = await createOpenAPIPlugin({ data: { api: { url: 'invalid' } } });
-    expect(result).toBe(null);
-  });
-
-  it('returns null when no spec is found', async () => {
-    const result = await createOpenAPIPlugin({});
-    expect(result).toBe(null);
-  });
-
-  // Add more tests here for different scenarios
-});
```
**DALLE3.js**

```diff
@@ -8,10 +8,10 @@ const { HttpsProxyAgent } = require('https-proxy-agent');
 const { FileContext, ContentTypes } = require('librechat-data-provider');
 const { getImageBasename } = require('~/server/services/Files/images');
 const extractBaseURL = require('~/utils/extractBaseURL');
-const { logger } = require('~/config');
+const logger = require('~/config/winston');
 
 const displayMessage =
-  'DALL-E displayed an image. All generated images are already plainly visible, so don\'t repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.';
+  "DALL-E displayed an image. All generated images are already plainly visible, so don't repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.";
 class DALLE3 extends Tool {
   constructor(fields = {}) {
     super();
```
**Image generation/editing tool**

```diff
@@ -4,12 +4,13 @@ const { v4 } = require('uuid');
 const OpenAI = require('openai');
 const FormData = require('form-data');
 const { tool } = require('@langchain/core/tools');
+const { logAxiosError } = require('@librechat/api');
+const { logger } = require('@librechat/data-schemas');
 const { HttpsProxyAgent } = require('https-proxy-agent');
 const { ContentTypes, EImageOutputType } = require('librechat-data-provider');
 const { getStrategyFunctions } = require('~/server/services/Files/strategies');
-const { logAxiosError, extractBaseURL } = require('~/utils');
+const { extractBaseURL } = require('~/utils');
 const { getFiles } = require('~/models/File');
-const { logger } = require('~/config');
 
 /** Default descriptions for image generation tool */
 const DEFAULT_IMAGE_GEN_DESCRIPTION = `
@@ -64,7 +65,7 @@ const DEFAULT_IMAGE_EDIT_PROMPT_DESCRIPTION = `Describe the changes, enhancement
 Always base this prompt on the most recently uploaded reference images.`;
 
 const displayMessage =
-  'The tool displayed an image. All generated images are already plainly visible, so don\'t repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.';
+  "The tool displayed an image. All generated images are already plainly visible, so don't repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.";
 
 /**
  * Replaces unwanted characters from the input string
```
**DALLE3 tests**

```diff
@@ -1,10 +1,29 @@
 const OpenAI = require('openai');
 const DALLE3 = require('../DALLE3');
-const { logger } = require('~/config');
+const logger = require('~/config/winston');
 
 jest.mock('openai');
 
+jest.mock('@librechat/data-schemas', () => {
+  return {
+    logger: {
+      info: jest.fn(),
+      warn: jest.fn(),
+      debug: jest.fn(),
+      error: jest.fn(),
+    },
+  };
+});
+
+jest.mock('tiktoken', () => {
+  return {
+    encoding_for_model: jest.fn().mockReturnValue({
+      encode: jest.fn(),
+      decode: jest.fn(),
+    }),
+  };
+});
+
 const processFileURL = jest.fn();
 
 jest.mock('~/server/services/Files/images', () => ({
@@ -37,6 +56,11 @@ jest.mock('fs', () => {
   return {
     existsSync: jest.fn(),
     mkdirSync: jest.fn(),
+    promises: {
+      writeFile: jest.fn(),
+      readFile: jest.fn(),
+      unlink: jest.fn(),
+    },
   };
 });
 
```
**fileSearch.js**

```diff
@@ -135,7 +135,7 @@ const createFileSearchTool = async ({ req, files, entity_id }) => {
       query: z
         .string()
         .describe(
-          'A natural language query to search for relevant information in the files. Be specific and use keywords related to the information you\'re looking for. The query will be used for semantic similarity matching against the file contents.',
+          "A natural language query to search for relevant information in the files. Be specific and use keywords related to the information you're looking for. The query will be used for semantic similarity matching against the file contents.",
         ),
     }),
   },
```
@@ -1,14 +1,14 @@
+const { mcpToolPattern } = require('@librechat/api');
+const { logger } = require('@librechat/data-schemas');
 const { SerpAPI } = require('@langchain/community/tools/serpapi');
 const { Calculator } = require('@langchain/community/tools/calculator');
 const { EnvVar, createCodeExecutionTool, createSearchTool } = require('@librechat/agents');
 const {
   Tools,
-  Constants,
   EToolResources,
   loadWebSearchAuth,
   replaceSpecialVars,
 } = require('librechat-data-provider');
-const { getUserPluginAuthValue } = require('~/server/services/PluginService');
 const {
   availableTools,
   manifestToolMap,

@@ -28,11 +28,10 @@ const {
 } = require('../');
 const { primeFiles: primeCodeFiles } = require('~/server/services/Files/Code/process');
 const { createFileSearchTool, primeFiles: primeSearchFiles } = require('./fileSearch');
+const { getUserPluginAuthValue } = require('~/server/services/PluginService');
 const { loadAuthValues } = require('~/server/services/Tools/credentials');
+const { getCachedTools } = require('~/server/services/Config');
 const { createMCPTool } = require('~/server/services/MCP');
-const { logger } = require('~/config');
-
-const mcpToolPattern = new RegExp(`^.+${Constants.mcp_delimiter}.+$`);

 /**
  * Validates the availability and authentication of tools for a user based on environment variables or user-specific plugin authentication values.

@@ -93,7 +92,7 @@ const validateTools = async (user, tools = []) => {
     return Array.from(validToolsSet.values());
   } catch (err) {
     logger.error('[validateTools] There was a problem validating tools', err);
-    throw new Error('There was a problem validating tools');
+    throw new Error(err);
   }
 };

@@ -236,7 +235,7 @@ const loadTools = async ({

   /** @type {Record<string, string>} */
   const toolContextMap = {};
-  const appTools = options.req?.app?.locals?.availableTools ?? {};
+  const appTools = (await getCachedTools({ includeGlobal: true })) ?? {};

   for (const tool of tools) {
     if (tool === Tools.execute_code) {

@@ -299,6 +298,7 @@ Current Date & Time: ${replaceSpecialVars({ text: '{{iso_datetime}}' })}
       requestedTools[tool] = async () =>
         createMCPTool({
           req: options.req,
+          res: options.res,
           toolKey: tool,
           model: agent?.model ?? model,
           provider: agent?.provider ?? endpoint,

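The notable behavioral change in `loadTools` is that available tool definitions are now read through `getCachedTools` rather than `req.app.locals.availableTools`. A hedged sketch of the new lookup, assuming only the call signature visible in the hunk above:

    const { getCachedTools } = require('~/server/services/Config');

    async function listAvailableToolKeys() {
      // includeGlobal merges globally registered (e.g., MCP) tools into the set
      const appTools = (await getCachedTools({ includeGlobal: true })) ?? {};
      return Object.keys(appTools);
    }
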
@@ -1,8 +1,5 @@
-const mockUser = {
-  _id: 'fakeId',
-  save: jest.fn(),
-  findByIdAndDelete: jest.fn(),
-};
+const mongoose = require('mongoose');
+const { MongoMemoryServer } = require('mongodb-memory-server');

 const mockPluginService = {
   updateUserPluginAuth: jest.fn(),

@@ -10,23 +7,18 @@ const mockPluginService = {
   getUserPluginAuthValue: jest.fn(),
 };

-jest.mock('~/models/User', () => {
-  return function () {
-    return mockUser;
-  };
-});
-
 jest.mock('~/server/services/PluginService', () => mockPluginService);

 const { BaseLLM } = require('@langchain/openai');
 const { Calculator } = require('@langchain/community/tools/calculator');

-const User = require('~/models/User');
+const { User } = require('~/db/models');
 const PluginService = require('~/server/services/PluginService');
 const { validateTools, loadTools, loadToolWithAuth } = require('./handleTools');
 const { StructuredSD, availableTools, DALLE3 } = require('../');

 describe('Tool Handlers', () => {
+  let mongoServer;
   let fakeUser;
   const pluginKey = 'dalle';
   const pluginKey2 = 'wolfram';

@@ -37,7 +29,9 @@ describe('Tool Handlers', () => {
   const authConfigs = mainPlugin.authConfig;

   beforeAll(async () => {
-    mockUser.save.mockResolvedValue(undefined);
+    mongoServer = await MongoMemoryServer.create();
+    const mongoUri = mongoServer.getUri();
+    await mongoose.connect(mongoUri);

     const userAuthValues = {};
     mockPluginService.getUserPluginAuthValue.mockImplementation((userId, authField) => {

@@ -78,9 +72,36 @@ describe('Tool Handlers', () => {
   });

   afterAll(async () => {
-    await mockUser.findByIdAndDelete(fakeUser._id);
+    await mongoose.disconnect();
+    await mongoServer.stop();
+  });
+
+  beforeEach(async () => {
+    // Clear mocks but not the database since we need the user to persist
+    jest.clearAllMocks();
+
+    // Reset the mock implementations
+    const userAuthValues = {};
+    mockPluginService.getUserPluginAuthValue.mockImplementation((userId, authField) => {
+      return userAuthValues[`${userId}-${authField}`];
+    });
+    mockPluginService.updateUserPluginAuth.mockImplementation(
+      (userId, authField, _pluginKey, credential) => {
+        const fields = authField.split('||');
+        fields.forEach((field) => {
+          userAuthValues[`${userId}-${field}`] = credential;
+        });
+      },
+    );
+
+    // Re-add the auth configs for the user
     for (const authConfig of authConfigs) {
-      await PluginService.deleteUserPluginAuth(fakeUser._id, authConfig.authField);
+      await PluginService.updateUserPluginAuth(
+        fakeUser._id,
+        authConfig.authField,
+        pluginKey,
+        mockCredential,
+      );
     }
   });

@@ -218,7 +239,6 @@ describe('Tool Handlers', () => {
     try {
       await loadTool2();
     } catch (error) {
-      // eslint-disable-next-line jest/no-conditional-expect
       expect(error).toBeDefined();
     }
   });

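This spec, like the others in this changeset, swaps hand-rolled model mocks for a real in-memory MongoDB. The shared lifecycle, distilled (standard `mongodb-memory-server` usage rather than anything LibreChat-specific):

    const mongoose = require('mongoose');
    const { MongoMemoryServer } = require('mongodb-memory-server');

    let mongoServer;

    beforeAll(async () => {
      mongoServer = await MongoMemoryServer.create(); // spins up a throwaway mongod
      await mongoose.connect(mongoServer.getUri());
    });

    afterAll(async () => {
      await mongoose.disconnect();
      await mongoServer.stop(); // tears down the temporary instance
    });
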
api/cache/banViolation.js (vendored, 4 changes)

@@ -1,8 +1,8 @@
+const { logger } = require('@librechat/data-schemas');
 const { ViolationTypes } = require('librechat-data-provider');
 const { isEnabled, math, removePorts } = require('~/server/utils');
 const { deleteAllUserSessions } = require('~/models');
 const getLogStores = require('./getLogStores');
-const { logger } = require('~/config');

 const { BAN_VIOLATIONS, BAN_INTERVAL } = process.env ?? {};
 const interval = math(BAN_INTERVAL, 20);

@@ -32,7 +32,6 @@ const banViolation = async (req, res, errorMessage) => {
   if (!isEnabled(BAN_VIOLATIONS)) {
     return;
   }
-
   if (!errorMessage) {
     return;
   }

@@ -51,7 +50,6 @@ const banViolation = async (req, res, errorMessage) => {

   const banLogs = getLogStores(ViolationTypes.BAN);
   const duration = errorMessage.duration || banLogs.opts.ttl;
-
   if (duration <= 0) {
     return;
   }

api/cache/banViolation.spec.js (vendored, 60 changes)

@@ -1,48 +1,28 @@
+const mongoose = require('mongoose');
+const { MongoMemoryServer } = require('mongodb-memory-server');
 const banViolation = require('./banViolation');

-jest.mock('keyv');
-jest.mock('../models/Session');
-// Mocking the getLogStores function
-jest.mock('./getLogStores', () => {
-  return jest.fn().mockImplementation(() => {
-    const EventEmitter = require('events');
-    const { CacheKeys } = require('librechat-data-provider');
-    const math = require('../server/utils/math');
-    const mockGet = jest.fn();
-    const mockSet = jest.fn();
-    class KeyvMongo extends EventEmitter {
-      constructor(url = 'mongodb://127.0.0.1:27017', options) {
-        super();
-        this.ttlSupport = false;
-        url = url ?? {};
-        if (typeof url === 'string') {
-          url = { url };
-        }
-        if (url.uri) {
-          url = { url: url.uri, ...url };
-        }
-        this.opts = {
-          url,
-          collection: 'keyv',
-          ...url,
-          ...options,
-        };
-      }
-
-      get = mockGet;
-      set = mockSet;
-    }
-
-    return new KeyvMongo('', {
-      namespace: CacheKeys.BANS,
-      ttl: math(process.env.BAN_DURATION, 7200000),
-    });
-  });
-});
+// Mock deleteAllUserSessions since we're testing ban logic, not session deletion
+jest.mock('~/models', () => ({
+  ...jest.requireActual('~/models'),
+  deleteAllUserSessions: jest.fn().mockResolvedValue(true),
+}));

 describe('banViolation', () => {
+  let mongoServer;
   let req, res, errorMessage;

+  beforeAll(async () => {
+    mongoServer = await MongoMemoryServer.create();
+    const mongoUri = mongoServer.getUri();
+    await mongoose.connect(mongoUri);
+  });
+
+  afterAll(async () => {
+    await mongoose.disconnect();
+    await mongoServer.stop();
+  });
+
   beforeEach(() => {
     req = {
       ip: '127.0.0.1',

@@ -55,7 +35,7 @@ describe('banViolation', () => {
     };
     errorMessage = {
       type: 'someViolation',
-      user_id: '12345',
+      user_id: new mongoose.Types.ObjectId().toString(), // Use valid ObjectId
       prev_count: 0,
       violation_count: 0,
     };

api/cache/getLogStores.js (vendored, 7 changes)

@@ -1,7 +1,7 @@
 const { Keyv } = require('keyv');
 const { CacheKeys, ViolationTypes, Time } = require('librechat-data-provider');
 const { logFile, violationFile } = require('./keyvFiles');
-const { math, isEnabled } = require('~/server/utils');
+const { isEnabled, math } = require('~/server/utils');
 const keyvRedis = require('./keyvRedis');
 const keyvMongo = require('./keyvMongo');

@@ -29,6 +29,10 @@ const roles = isRedisEnabled
   ? new Keyv({ store: keyvRedis })
   : new Keyv({ namespace: CacheKeys.ROLES });

+const mcpTools = isRedisEnabled
+  ? new Keyv({ store: keyvRedis })
+  : new Keyv({ namespace: CacheKeys.MCP_TOOLS });
+
 const audioRuns = isRedisEnabled
   ? new Keyv({ store: keyvRedis, ttl: Time.TEN_MINUTES })
   : new Keyv({ namespace: CacheKeys.AUDIO_RUNS, ttl: Time.TEN_MINUTES });

@@ -67,6 +71,7 @@ const openIdExchangedTokensCache = isRedisEnabled

 const namespaces = {
   [CacheKeys.ROLES]: roles,
+  [CacheKeys.MCP_TOOLS]: mcpTools,
   [CacheKeys.CONFIG_STORE]: config,
   [CacheKeys.PENDING_REQ]: pending_req,
   [ViolationTypes.BAN]: new Keyv({ store: keyvMongo, namespace: CacheKeys.BANS, ttl: duration }),

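Consumers reach the new `MCP_TOOLS` namespace through the same `getLogStores` accessor already used for bans and the other namespaces in this file. A sketch (the shape of the cached value is an assumption; this diff only adds the namespace):

    const { CacheKeys } = require('librechat-data-provider');
    const getLogStores = require('~/cache/getLogStores');

    async function cacheMCPToolList(serverName, toolDefinitions) {
      const mcpToolsCache = getLogStores(CacheKeys.MCP_TOOLS);
      await mcpToolsCache.set(serverName, toolDefinitions);
      return mcpToolsCache.get(serverName);
    }
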
@@ -1,7 +1,6 @@
-const axios = require('axios');
 const { EventSource } = require('eventsource');
-const { Time, CacheKeys } = require('librechat-data-provider');
-const { MCPManager, FlowStateManager } = require('librechat-mcp');
+const { Time } = require('librechat-data-provider');
+const { MCPManager, FlowStateManager } = require('@librechat/api');
 const logger = require('./winston');

 global.EventSource = EventSource;

@@ -16,7 +15,7 @@ let flowManager = null;
  */
 function getMCPManager(userId) {
   if (!mcpManager) {
-    mcpManager = MCPManager.getInstance(logger);
+    mcpManager = MCPManager.getInstance();
   } else {
     mcpManager.checkIdleConnections(userId);
   }

@@ -31,66 +30,13 @@ function getFlowStateManager(flowsCache) {
   if (!flowManager) {
     flowManager = new FlowStateManager(flowsCache, {
       ttl: Time.ONE_MINUTE * 3,
-      logger,
     });
   }
   return flowManager;
 }

-/**
- * Sends message data in Server Sent Events format.
- * @param {ServerResponse} res - The server response.
- * @param {{ data: string | Record<string, unknown>, event?: string }} event - The message event.
- * @param {string} event.event - The type of event.
- * @param {string} event.data - The message to be sent.
- */
-const sendEvent = (res, event) => {
-  if (typeof event.data === 'string' && event.data.length === 0) {
-    return;
-  }
-  res.write(`event: message\ndata: ${JSON.stringify(event)}\n\n`);
-};
-
-/**
- * Creates and configures an Axios instance with optional proxy settings.
- *
- * @typedef {import('axios').AxiosInstance} AxiosInstance
- * @typedef {import('axios').AxiosProxyConfig} AxiosProxyConfig
- *
- * @returns {AxiosInstance} A configured Axios instance
- * @throws {Error} If there's an issue creating the Axios instance or parsing the proxy URL
- */
-function createAxiosInstance() {
-  const instance = axios.create();
-
-  if (process.env.proxy) {
-    try {
-      const url = new URL(process.env.proxy);
-
-      /** @type {AxiosProxyConfig} */
-      const proxyConfig = {
-        host: url.hostname.replace(/^\[|\]$/g, ''),
-        protocol: url.protocol.replace(':', ''),
-      };
-
-      if (url.port) {
-        proxyConfig.port = parseInt(url.port, 10);
-      }
-
-      instance.defaults.proxy = proxyConfig;
-    } catch (error) {
-      console.error('Error parsing proxy URL:', error);
-      throw new Error(`Invalid proxy URL: ${process.env.proxy}`);
-    }
-  }
-
-  return instance;
-}
-
 module.exports = {
   logger,
-  sendEvent,
   getMCPManager,
-  createAxiosInstance,
   getFlowStateManager,
 };

@@ -39,7 +39,10 @@ async function connectDb() {
     });
   }
   cached.conn = await cached.promise;

   return cached.conn;
 }

-module.exports = connectDb;
+module.exports = {
+  connectDb,
+};

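Because `connectDb` moves from a default export to a named export, each call site needs a one-line change (the require path follows `api/db/index.js` below):

    // before: const connectDb = require('./connect');
    const { connectDb } = require('./connect');

    // usage is unchanged: await connectDb();
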
api/db/index.js (new file, 8 lines)

@@ -0,0 +1,8 @@
+const mongoose = require('mongoose');
+const { createModels } = require('@librechat/data-schemas');
+const { connectDb } = require('./connect');
+const indexSync = require('./indexSync');
+
+createModels(mongoose);
+
+module.exports = { connectDb, indexSync };

@@ -1,8 +1,11 @@
+const mongoose = require('mongoose');
 const { MeiliSearch } = require('meilisearch');
-const { Conversation } = require('~/models/Conversation');
-const { Message } = require('~/models/Message');
+const { logger } = require('@librechat/data-schemas');
 const { isEnabled } = require('~/server/utils');
-const { logger } = require('~/config');
+
+const Conversation = mongoose.models.Conversation;
+const Message = mongoose.models.Message;

 const searchEnabled = isEnabled(process.env.SEARCH);
 const indexingDisabled = isEnabled(process.env.MEILI_NO_SYNC);

@@ -29,7 +32,6 @@ async function indexSync() {
   if (!searchEnabled) {
     return;
   }
-
   try {
     const client = MeiliSearchClient.getInstance();

api/db/models.js (new file, 5 lines)

@@ -0,0 +1,5 @@
+const mongoose = require('mongoose');
+const { createModels } = require('@librechat/data-schemas');
+const models = createModels(mongoose);
+
+module.exports = { ...models };

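With `createModels(mongoose)` run once at require time, the model files in the rest of this diff stop compiling schemas via `mongoose.model()` and instead destructure from this shared registry:

    // any registered model, from one place:
    const { Agent, Banner, Conversation, Message, User } = require('~/db/models');
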
@@ -1,4 +0,0 @@
-const connectDb = require('./connectDb');
-const indexSync = require('./indexSync');
-
-module.exports = { connectDb, indexSync };

@@ -1,7 +1,4 @@
-const mongoose = require('mongoose');
-const { actionSchema } = require('@librechat/data-schemas');
-
-const Action = mongoose.model('action', actionSchema);
+const { Action } = require('~/db/models');

 /**
  * Update an action with new data without overwriting existing properties,

@@ -1,21 +1,18 @@
 const mongoose = require('mongoose');
 const crypto = require('node:crypto');
-const { agentSchema } = require('@librechat/data-schemas');
+const { logger } = require('@librechat/data-schemas');
 const { SystemRoles, Tools, actionDelimiter } = require('librechat-data-provider');
 const { GLOBAL_PROJECT_NAME, EPHEMERAL_AGENT_ID, mcp_delimiter } =
   require('librechat-data-provider').Constants;
-const { CONFIG_STORE, STARTUP_CONFIG } = require('librechat-data-provider').CacheKeys;
 const {
   getProjectByName,
   addAgentIdsToProject,
   removeAgentIdsFromProject,
   removeAgentFromAllProjects,
 } = require('./Project');
-const getLogStores = require('~/cache/getLogStores');
+const { getCachedTools } = require('~/server/services/Config');
 const { getActions } = require('./Action');
-const { logger } = require('~/config');
-
-const Agent = mongoose.model('agent', agentSchema);
+const { Agent } = require('~/db/models');

 /**
  * Create an agent with the provided data.

@@ -24,7 +21,7 @@ const Agent = mongoose.model('agent', agentSchema);
  * @throws {Error} If the agent creation fails.
  */
 const createAgent = async (agentData) => {
-  const { author, ...versionData } = agentData;
+  const { author: _author, ...versionData } = agentData;
   const timestamp = new Date();
   const initialAgentData = {
     ...agentData,

@@ -57,12 +54,12 @@ const getAgent = async (searchParameter) => await Agent.findOne(searchParameter)
  * @param {string} params.agent_id
  * @param {string} params.endpoint
  * @param {import('@librechat/agents').ClientOptions} [params.model_parameters]
- * @returns {Agent|null} The agent document as a plain object, or null if not found.
+ * @returns {Promise<Agent|null>} The agent document as a plain object, or null if not found.
  */
-const loadEphemeralAgent = ({ req, agent_id, endpoint, model_parameters: _m }) => {
+const loadEphemeralAgent = async ({ req, agent_id, endpoint, model_parameters: _m }) => {
   const { model, ...model_parameters } = _m;
   /** @type {Record<string, FunctionTool>} */
-  const availableTools = req.app.locals.availableTools;
+  const availableTools = await getCachedTools({ includeGlobal: true });
   /** @type {TEphemeralAgent | null} */
   const ephemeralAgent = req.body.ephemeralAgent;
   const mcpServers = new Set(ephemeralAgent?.mcp);

@@ -113,7 +110,7 @@ const loadAgent = async ({ req, agent_id, endpoint, model_parameters }) => {
     return null;
   }
   if (agent_id === EPHEMERAL_AGENT_ID) {
-    return loadEphemeralAgent({ req, agent_id, endpoint, model_parameters });
+    return await loadEphemeralAgent({ req, agent_id, endpoint, model_parameters });
   }
   const agent = await getAgent({
     id: agent_id,

@@ -124,29 +121,7 @@ const loadAgent = async ({ req, agent_id, endpoint, model_parameters }) => {
   }

   agent.version = agent.versions ? agent.versions.length : 0;
-  if (agent.author.toString() === req.user.id) {
-    return agent;
-  }
-
-  if (!agent.projectIds) {
-    return null;
-  }
-
-  const cache = getLogStores(CONFIG_STORE);
-  /** @type {TStartupConfig} */
-  const cachedStartupConfig = await cache.get(STARTUP_CONFIG);
-  let { instanceProjectId } = cachedStartupConfig ?? {};
-  if (!instanceProjectId) {
-    instanceProjectId = (await getProjectByName(GLOBAL_PROJECT_NAME, '_id'))._id.toString();
-  }
-
-  for (const projectObjectId of agent.projectIds) {
-    const projectId = projectObjectId.toString();
-    if (projectId === instanceProjectId) {
-      return agent;
-    }
-  }
+  return agent;
 };

 /**

@@ -172,12 +147,11 @@ const isDuplicateVersion = (updateData, currentData, versions, actionsHash = nul
     'created_at',
     'updated_at',
     '__v',
-    'agent_ids',
     'versions',
     'actionsHash', // Exclude actionsHash from direct comparison
   ];

-  const { $push, $pull, $addToSet, ...directUpdates } = updateData;
+  const { $push: _$push, $pull: _$pull, $addToSet: _$addToSet, ...directUpdates } = updateData;

   if (Object.keys(directUpdates).length === 0 && !actionsHash) {
     return null;

@@ -262,16 +236,24 @@ const isDuplicateVersion = (updateData, currentData, versions, actionsHash = nul
  * @param {Object} [options] - Optional configuration object.
  * @param {string} [options.updatingUserId] - The ID of the user performing the update (used for tracking non-author updates).
  * @param {boolean} [options.forceVersion] - Force creation of a new version even if no fields changed.
+ * @param {boolean} [options.skipVersioning] - Skip version creation entirely (useful for isolated operations like sharing).
  * @returns {Promise<Agent>} The updated or newly created agent document as a plain object.
  * @throws {Error} If the update would create a duplicate version
  */
 const updateAgent = async (searchParameter, updateData, options = {}) => {
-  const { updatingUserId = null, forceVersion = false } = options;
+  const { updatingUserId = null, forceVersion = false, skipVersioning = false } = options;
   const mongoOptions = { new: true, upsert: false };

   const currentAgent = await Agent.findOne(searchParameter);
   if (currentAgent) {
-    const { __v, _id, id, versions, author, ...versionData } = currentAgent.toObject();
+    const {
+      __v,
+      _id,
+      id: __id,
+      versions,
+      author: _author,
+      ...versionData
+    } = currentAgent.toObject();
     const { $push, $pull, $addToSet, ...directUpdates } = updateData;

     let actionsHash = null;

@@ -303,10 +285,8 @@ const updateAgent = async (searchParameter, updateData, options = {}) => {
     }

     const shouldCreateVersion =
-      forceVersion ||
-      (versions &&
-        versions.length > 0 &&
-        (Object.keys(directUpdates).length > 0 || $push || $pull || $addToSet));
+      !skipVersioning &&
+      (forceVersion || Object.keys(directUpdates).length > 0 || $push || $pull || $addToSet);

     if (shouldCreateVersion) {
       const duplicateVersion = isDuplicateVersion(updateData, versionData, versions, actionsHash);

@@ -341,7 +321,7 @@ const updateAgent = async (searchParameter, updateData, options = {}) => {
       versionEntry.updatedBy = new mongoose.Types.ObjectId(updatingUserId);
     }

-    if (shouldCreateVersion || forceVersion) {
+    if (shouldCreateVersion) {
       updateData.$push = {
         ...($push || {}),
         versions: versionEntry,

@@ -464,8 +444,110 @@ const deleteAgent = async (searchParameter) => {
   return agent;
 };

+/**
+ * Get agents by accessible IDs with optional cursor-based pagination.
+ * @param {Object} params - The parameters for getting accessible agents.
+ * @param {Array} [params.accessibleIds] - Array of agent ObjectIds the user has ACL access to.
+ * @param {Object} [params.otherParams] - Additional query parameters (including author filter).
+ * @param {number} [params.limit] - Number of agents to return (max 100). If not provided, returns all agents.
+ * @param {string} [params.after] - Cursor for pagination - get agents after this cursor. // base64 encoded JSON string with updatedAt and _id.
+ * @returns {Promise<Object>} A promise that resolves to an object containing the agents data and pagination info.
+ */
+const getListAgentsByAccess = async ({
+  accessibleIds = [],
+  otherParams = {},
+  limit = null,
+  after = null,
+}) => {
+  const isPaginated = limit !== null && limit !== undefined;
+  const normalizedLimit = isPaginated ? Math.min(Math.max(1, parseInt(limit) || 20), 100) : null;
+
+  // Build base query combining ACL accessible agents with other filters
+  const baseQuery = { ...otherParams };
+
+  if (accessibleIds.length > 0) {
+    baseQuery._id = { $in: accessibleIds };
+  }
+
+  // Add cursor condition
+  if (after) {
+    try {
+      const cursor = JSON.parse(Buffer.from(after, 'base64').toString('utf8'));
+      const { updatedAt, _id } = cursor;
+
+      const cursorCondition = {
+        $or: [
+          { updatedAt: { $lt: new Date(updatedAt) } },
+          { updatedAt: new Date(updatedAt), _id: { $gt: mongoose.Types.ObjectId(_id) } },
+        ],
+      };
+
+      // Merge cursor condition with base query
+      if (Object.keys(baseQuery).length > 0) {
+        baseQuery.$and = [{ ...baseQuery }, cursorCondition];
+        // Remove the original conditions from baseQuery to avoid duplication
+        Object.keys(baseQuery).forEach((key) => {
+          if (key !== '$and') delete baseQuery[key];
+        });
+      } else {
+        Object.assign(baseQuery, cursorCondition);
+      }
+    } catch (error) {
+      logger.warn('Invalid cursor:', error.message);
+    }
+  }
+
+  let query = Agent.find(baseQuery, {
+    id: 1,
+    _id: 1,
+    name: 1,
+    avatar: 1,
+    author: 1,
+    projectIds: 1,
+    description: 1,
+    updatedAt: 1,
+  }).sort({ updatedAt: -1, _id: 1 });
+
+  // Only apply limit if pagination is requested
+  if (isPaginated) {
+    query = query.limit(normalizedLimit + 1);
+  }
+
+  const agents = await query.lean();
+
+  const hasMore = isPaginated ? agents.length > normalizedLimit : false;
+  const data = (isPaginated ? agents.slice(0, normalizedLimit) : agents).map((agent) => {
+    if (agent.author) {
+      agent.author = agent.author.toString();
+    }
+    return agent;
+  });
+
+  // Generate next cursor only if paginated
+  let nextCursor = null;
+  if (isPaginated && hasMore && data.length > 0) {
+    const lastAgent = agents[normalizedLimit - 1];
+    nextCursor = Buffer.from(
+      JSON.stringify({
+        updatedAt: lastAgent.updatedAt.toISOString(),
+        _id: lastAgent._id.toString(),
+      }),
+    ).toString('base64');
+  }
+
+  return {
+    object: 'list',
+    data,
+    first_id: data.length > 0 ? data[0].id : null,
+    last_id: data.length > 0 ? data[data.length - 1].id : null,
+    has_more: hasMore,
+    after: nextCursor,
+  };
+};
+
 /**
  * Get all agents.
+ * @deprecated Use getListAgentsByAccess for ACL-aware agent listing
  * @param {Object} searchParameter - The search parameters to find matching agents.
  * @param {string} searchParameter.author - The user ID of the agent's author.
  * @returns {Promise<Object>} A promise that resolves to an object containing the agents data and pagination info.

@@ -481,16 +563,16 @@ const getListAgents = async (searchParameter) => {
     delete globalQuery.author;
     query = { $or: [globalQuery, query] };
   }

   const agents = (
     await Agent.find(query, {
       id: 1,
-      _id: 0,
+      _id: 1,
       name: 1,
       avatar: 1,
       author: 1,
       projectIds: 1,
       description: 1,
+      // @deprecated - isCollaborative replaced by ACL permissions
       isCollaborative: 1,
     }).lean()
   ).map((agent) => {

@@ -553,7 +635,10 @@ const updateAgentProjects = async ({ user, agentId, projectIds, removeProjectIds
     delete updateQuery.author;
   }

-  const updatedAgent = await updateAgent(updateQuery, updateOps, { updatingUserId: user.id });
+  const updatedAgent = await updateAgent(updateQuery, updateOps, {
+    updatingUserId: user.id,
+    skipVersioning: true,
+  });
   if (updatedAgent) {
     return updatedAgent;
   }

@@ -662,7 +747,6 @@ const generateActionMetadataHash = async (actionIds, actions) => {
  */

 module.exports = {
-  Agent,
   getAgent,
   loadAgent,
   createAgent,

@@ -672,6 +756,7 @@ module.exports = {
   revertAgentVersion,
   updateAgentProjects,
   addAgentResourceFile,
+  getListAgentsByAccess,
   removeAgentResourceFiles,
   generateActionMetadataHash,
 };

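The pagination cursor in `getListAgentsByAccess` is an opaque base64 string wrapping the last row's sort keys (`updatedAt`, `_id`); a round-trip sketch with illustrative helper names follows. One caveat worth flagging: the hunk calls `mongoose.Types.ObjectId(_id)` without `new`, which recent Mongoose majors reject, so a `new` may be needed there.

    const encodeCursor = (lastAgent) =>
      Buffer.from(
        JSON.stringify({
          updatedAt: lastAgent.updatedAt.toISOString(),
          _id: lastAgent._id.toString(),
        }),
      ).toString('base64');

    const decodeCursor = (after) => JSON.parse(Buffer.from(after, 'base64').toString('utf8'));

    // Paging through agents:
    // const page1 = await getListAgentsByAccess({ accessibleIds, limit: 20 });
    // const page2 = await getListAgentsByAccess({ accessibleIds, limit: 20, after: page1.after });
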
(File diff suppressed because it is too large.)

@@ -1,7 +1,4 @@
-const mongoose = require('mongoose');
-const { assistantSchema } = require('@librechat/data-schemas');
-
-const Assistant = mongoose.model('assistant', assistantSchema);
+const { Assistant } = require('~/db/models');

 /**
  * Update an assistant with new data without overwriting existing properties,

@@ -1,4 +0,0 @@
-const mongoose = require('mongoose');
-const { balanceSchema } = require('@librechat/data-schemas');
-
-module.exports = mongoose.model('Balance', balanceSchema);

@@ -1,8 +1,5 @@
-const mongoose = require('mongoose');
-const logger = require('~/config/winston');
-const { bannerSchema } = require('@librechat/data-schemas');
-
-const Banner = mongoose.model('Banner', bannerSchema);
+const { logger } = require('@librechat/data-schemas');
+const { Banner } = require('~/db/models');

 /**
  * Retrieves the current active banner.

@@ -28,4 +25,4 @@ const getBanner = async (user) => {
   }
 };

-module.exports = { Banner, getBanner };
+module.exports = { getBanner };

@@ -1,86 +0,0 @@
-const mongoose = require('mongoose');
-const { logger } = require('~/config');
-
-const major = [0, 0];
-const minor = [0, 0];
-const patch = [0, 5];
-
-const configSchema = mongoose.Schema(
-  {
-    tag: {
-      type: String,
-      required: true,
-      validate: {
-        validator: function (tag) {
-          const [part1, part2, part3] = tag.replace('v', '').split('.').map(Number);
-
-          // Check if all parts are numbers
-          if (isNaN(part1) || isNaN(part2) || isNaN(part3)) {
-            return false;
-          }
-
-          // Check if all parts are within their respective ranges
-          if (part1 < major[0] || part1 > major[1]) {
-            return false;
-          }
-          if (part2 < minor[0] || part2 > minor[1]) {
-            return false;
-          }
-          if (part3 < patch[0] || part3 > patch[1]) {
-            return false;
-          }
-          return true;
-        },
-        message: 'Invalid tag value',
-      },
-    },
-    searchEnabled: {
-      type: Boolean,
-      default: false,
-    },
-    usersEnabled: {
-      type: Boolean,
-      default: false,
-    },
-    startupCounts: {
-      type: Number,
-      default: 0,
-    },
-  },
-  { timestamps: true },
-);
-
-// Instance method
-configSchema.methods.incrementCount = function () {
-  this.startupCounts += 1;
-};
-
-// Static methods
-configSchema.statics.findByTag = async function (tag) {
-  return await this.findOne({ tag }).lean();
-};
-
-configSchema.statics.updateByTag = async function (tag, update) {
-  return await this.findOneAndUpdate({ tag }, update, { new: true });
-};
-
-const Config = mongoose.models.Config || mongoose.model('Config', configSchema);
-
-module.exports = {
-  getConfigs: async (filter) => {
-    try {
-      return await Config.find(filter).lean();
-    } catch (error) {
-      logger.error('Error getting configs', error);
-      return { config: 'Error getting configs' };
-    }
-  },
-  deleteConfigs: async (filter) => {
-    try {
-      return await Config.deleteMany(filter);
-    } catch (error) {
-      logger.error('Error deleting configs', error);
-      return { config: 'Error deleting configs' };
-    }
-  },
-};

@@ -1,6 +1,6 @@
-const Conversation = require('./schema/convoSchema');
+const { logger } = require('@librechat/data-schemas');
 const { getMessages, deleteMessages } = require('./Message');
-const logger = require('~/config/winston');
+const { Conversation } = require('~/db/models');

 /**
  * Searches for a conversation by conversationId and returns a lean document with only conversationId and user.

@@ -75,7 +75,6 @@ const getConvoFiles = async (conversationId) => {
 };

 module.exports = {
-  Conversation,
   getConvoFiles,
   searchConversation,
   deleteNullOrEmptyConversations,

@@ -155,7 +154,6 @@ module.exports = {
     { cursor, limit = 25, isArchived = false, tags, search, order = 'desc' } = {},
   ) => {
     const filters = [{ user }];
-
     if (isArchived) {
       filters.push({ isArchived: true });
     } else {

@@ -288,7 +286,6 @@ module.exports = {
   deleteConvos: async (user, filter) => {
     try {
       const userFilter = { ...filter, user };
-
       const conversations = await Conversation.find(userFilter).select('conversationId');
       const conversationIds = conversations.map((c) => c.conversationId);

@@ -1,10 +1,5 @@
-const mongoose = require('mongoose');
-const Conversation = require('./schema/convoSchema');
-const logger = require('~/config/winston');
-
-const { conversationTagSchema } = require('@librechat/data-schemas');
-
-const ConversationTag = mongoose.model('ConversationTag', conversationTagSchema);
+const { logger } = require('@librechat/data-schemas');
+const { ConversationTag, Conversation } = require('~/db/models');

 /**
  * Retrieves all conversation tags for a user.

@@ -140,13 +135,13 @@ const adjustPositions = async (user, oldPosition, newPosition) => {
   const position =
     oldPosition < newPosition
       ? {
           $gt: Math.min(oldPosition, newPosition),
           $lte: Math.max(oldPosition, newPosition),
         }
       : {
           $gte: Math.min(oldPosition, newPosition),
           $lt: Math.max(oldPosition, newPosition),
         };

   await ConversationTag.updateMany(
     {

@@ -1,9 +1,6 @@
-const mongoose = require('mongoose');
+const { logger } = require('@librechat/data-schemas');
 const { EToolResources } = require('librechat-data-provider');
-const { fileSchema } = require('@librechat/data-schemas');
-const { logger } = require('~/config');
-
-const File = mongoose.model('File', fileSchema);
+const { File } = require('~/db/models');

 /**
  * Finds a file by its file_id with additional query options.

@@ -169,7 +166,6 @@ async function batchUpdateFiles(updates) {
 }

 module.exports = {
-  File,
   findFileById,
   getFiles,
   getToolFilesByIds,

@@ -1,4 +0,0 @@
-const mongoose = require('mongoose');
-const { keySchema } = require('@librechat/data-schemas');
-
-module.exports = mongoose.model('Key', keySchema);

@@ -1,6 +1,6 @@
 const { z } = require('zod');
-const Message = require('./schema/messageSchema');
-const { logger } = require('~/config');
+const { logger } = require('@librechat/data-schemas');
+const { Message } = require('~/db/models');

 const idSchema = z.string().uuid();

@@ -68,7 +68,6 @@ async function saveMessage(req, params, metadata) {
     logger.info(`---\`saveMessage\` context: ${metadata?.context}`);
     update.tokenCount = 0;
   }
-
   const message = await Message.findOneAndUpdate(
     { messageId: params.messageId, user: req.user.id },
     update,

@@ -140,7 +139,6 @@ async function bulkSaveMessages(messages, overrideTimestamp = false) {
         upsert: true,
       },
     }));
-
     const result = await Message.bulkWrite(bulkOps);
     return result;
   } catch (err) {

@@ -255,6 +253,7 @@ async function updateMessage(req, message, metadata) {
       text: updatedMessage.text,
       isCreatedByUser: updatedMessage.isCreatedByUser,
       tokenCount: updatedMessage.tokenCount,
+      feedback: updatedMessage.feedback,
     };
   } catch (err) {
     logger.error('Error updating message:', err);

@@ -355,7 +354,6 @@ async function deleteMessages(filter) {
 }

 module.exports = {
-  Message,
   saveMessage,
   bulkSaveMessages,
   recordMessage,

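`updateMessage` now also surfaces the message's `feedback` field on its return value, so callers no longer need a second fetch to read it (sketch; the feedback shape is defined by the schema, not shown in this diff):

    const updated = await updateMessage(req, { messageId, text: 'edited text' });
    console.log(updated.feedback); // present alongside text, tokenCount, etc.
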
@@ -1,32 +1,7 @@
|
|||||||
const mongoose = require('mongoose');
|
const mongoose = require('mongoose');
|
||||||
|
const { MongoMemoryServer } = require('mongodb-memory-server');
|
||||||
const { v4: uuidv4 } = require('uuid');
|
const { v4: uuidv4 } = require('uuid');
|
||||||
|
const { messageSchema } = require('@librechat/data-schemas');
|
||||||
jest.mock('mongoose');
|
|
||||||
|
|
||||||
const mockFindQuery = {
|
|
||||||
select: jest.fn().mockReturnThis(),
|
|
||||||
sort: jest.fn().mockReturnThis(),
|
|
||||||
lean: jest.fn().mockReturnThis(),
|
|
||||||
deleteMany: jest.fn().mockResolvedValue({ deletedCount: 1 }),
|
|
||||||
};
|
|
||||||
|
|
||||||
const mockSchema = {
|
|
||||||
findOneAndUpdate: jest.fn(),
|
|
||||||
updateOne: jest.fn(),
|
|
||||||
findOne: jest.fn(() => ({
|
|
||||||
lean: jest.fn(),
|
|
||||||
})),
|
|
||||||
find: jest.fn(() => mockFindQuery),
|
|
||||||
deleteMany: jest.fn(),
|
|
||||||
};
|
|
||||||
|
|
||||||
mongoose.model.mockReturnValue(mockSchema);
|
|
||||||
|
|
||||||
jest.mock('~/models/schema/messageSchema', () => mockSchema);
|
|
||||||
|
|
||||||
jest.mock('~/config/winston', () => ({
|
|
||||||
error: jest.fn(),
|
|
||||||
}));
|
|
||||||
|
|
||||||
const {
|
const {
|
||||||
saveMessage,
|
saveMessage,
|
||||||
@@ -35,77 +10,102 @@ const {
|
|||||||
deleteMessages,
|
deleteMessages,
|
||||||
updateMessageText,
|
updateMessageText,
|
||||||
deleteMessagesSince,
|
deleteMessagesSince,
|
||||||
} = require('~/models/Message');
|
} = require('./Message');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @type {import('mongoose').Model<import('@librechat/data-schemas').IMessage>}
|
||||||
|
*/
|
||||||
|
let Message;
|
||||||
|
|
||||||
describe('Message Operations', () => {
|
describe('Message Operations', () => {
|
||||||
|
let mongoServer;
|
||||||
let mockReq;
|
let mockReq;
|
||||||
let mockMessage;
|
let mockMessageData;
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeAll(async () => {
|
||||||
jest.clearAllMocks();
|
mongoServer = await MongoMemoryServer.create();
|
||||||
|
const mongoUri = mongoServer.getUri();
|
||||||
|
Message = mongoose.models.Message || mongoose.model('Message', messageSchema);
|
||||||
|
await mongoose.connect(mongoUri);
|
||||||
|
});
|
||||||
|
|
||||||
|
afterAll(async () => {
|
||||||
|
await mongoose.disconnect();
|
||||||
|
await mongoServer.stop();
|
||||||
|
});
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
// Clear database
|
||||||
|
await Message.deleteMany({});
|
||||||
|
|
||||||
mockReq = {
|
mockReq = {
|
||||||
user: { id: 'user123' },
|
user: { id: 'user123' },
|
||||||
};
|
};
|
||||||
|
|
||||||
mockMessage = {
|
mockMessageData = {
|
||||||
messageId: 'msg123',
|
messageId: 'msg123',
|
||||||
conversationId: uuidv4(),
|
conversationId: uuidv4(),
|
||||||
text: 'Hello, world!',
|
text: 'Hello, world!',
|
||||||
user: 'user123',
|
user: 'user123',
|
||||||
};
|
};
|
||||||
|
|
||||||
mockSchema.findOneAndUpdate.mockResolvedValue({
|
|
||||||
toObject: () => mockMessage,
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('saveMessage', () => {
|
describe('saveMessage', () => {
|
||||||
it('should save a message for an authenticated user', async () => {
|
it('should save a message for an authenticated user', async () => {
|
||||||
const result = await saveMessage(mockReq, mockMessage);
|
const result = await saveMessage(mockReq, mockMessageData);
|
||||||
expect(result).toEqual(mockMessage);
|
|
||||||
expect(mockSchema.findOneAndUpdate).toHaveBeenCalledWith(
|
expect(result.messageId).toBe('msg123');
|
||||||
{ messageId: 'msg123', user: 'user123' },
|
expect(result.user).toBe('user123');
|
||||||
expect.objectContaining({ user: 'user123' }),
|
expect(result.text).toBe('Hello, world!');
|
||||||
expect.any(Object),
|
|
||||||
);
|
// Verify the message was actually saved to the database
|
||||||
|
const savedMessage = await Message.findOne({ messageId: 'msg123', user: 'user123' });
|
||||||
|
expect(savedMessage).toBeTruthy();
|
||||||
|
expect(savedMessage.text).toBe('Hello, world!');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should throw an error for unauthenticated user', async () => {
|
it('should throw an error for unauthenticated user', async () => {
|
||||||
mockReq.user = null;
|
mockReq.user = null;
|
||||||
await expect(saveMessage(mockReq, mockMessage)).rejects.toThrow('User not authenticated');
|
await expect(saveMessage(mockReq, mockMessageData)).rejects.toThrow('User not authenticated');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should throw an error for invalid conversation ID', async () => {
|
it('should handle invalid conversation ID gracefully', async () => {
|
||||||
mockMessage.conversationId = 'invalid-id';
|
mockMessageData.conversationId = 'invalid-id';
|
||||||
await expect(saveMessage(mockReq, mockMessage)).resolves.toBeUndefined();
|
const result = await saveMessage(mockReq, mockMessageData);
|
||||||
|
expect(result).toBeUndefined();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('updateMessageText', () => {
|
describe('updateMessageText', () => {
|
||||||
it('should update message text for the authenticated user', async () => {
|
it('should update message text for the authenticated user', async () => {
|
||||||
|
// First save a message
|
||||||
|
await saveMessage(mockReq, mockMessageData);
|
||||||
|
|
||||||
|
// Then update it
|
||||||
await updateMessageText(mockReq, { messageId: 'msg123', text: 'Updated text' });
|
await updateMessageText(mockReq, { messageId: 'msg123', text: 'Updated text' });
|
||||||
expect(mockSchema.updateOne).toHaveBeenCalledWith(
|
|
||||||
{ messageId: 'msg123', user: 'user123' },
|
// Verify the update
|
||||||
{ text: 'Updated text' },
|
const updatedMessage = await Message.findOne({ messageId: 'msg123', user: 'user123' });
|
||||||
);
|
expect(updatedMessage.text).toBe('Updated text');
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('updateMessage', () => {
|
describe('updateMessage', () => {
|
||||||
it('should update a message for the authenticated user', async () => {
|
it('should update a message for the authenticated user', async () => {
|
||||||
mockSchema.findOneAndUpdate.mockResolvedValue(mockMessage);
|
// First save a message
|
||||||
|
await saveMessage(mockReq, mockMessageData);
|
||||||
|
|
||||||
const result = await updateMessage(mockReq, { messageId: 'msg123', text: 'Updated text' });
|
const result = await updateMessage(mockReq, { messageId: 'msg123', text: 'Updated text' });
|
||||||
expect(result).toEqual(
|
|
||||||
expect.objectContaining({
|
expect(result.messageId).toBe('msg123');
|
||||||
messageId: 'msg123',
|
expect(result.text).toBe('Updated text');
|
||||||
text: 'Hello, world!',
|
|
||||||
}),
|
// Verify in database
|
||||||
);
|
const updatedMessage = await Message.findOne({ messageId: 'msg123', user: 'user123' });
|
||||||
|
expect(updatedMessage.text).toBe('Updated text');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should throw an error if message is not found', async () => {
|
it('should throw an error if message is not found', async () => {
|
||||||
mockSchema.findOneAndUpdate.mockResolvedValue(null);
|
|
||||||
await expect(
|
await expect(
|
||||||
updateMessage(mockReq, { messageId: 'nonexistent', text: 'Test' }),
|
updateMessage(mockReq, { messageId: 'nonexistent', text: 'Test' }),
|
||||||
).rejects.toThrow('Message not found or user not authorized.');
|
).rejects.toThrow('Message not found or user not authorized.');
|
||||||
@@ -114,19 +114,45 @@ describe('Message Operations', () => {
 
   describe('deleteMessagesSince', () => {
     it('should delete messages only for the authenticated user', async () => {
-      mockSchema.findOne().lean.mockResolvedValueOnce({ createdAt: new Date() });
-      mockFindQuery.deleteMany.mockResolvedValueOnce({ deletedCount: 1 });
-      const result = await deleteMessagesSince(mockReq, {
-        messageId: 'msg123',
-        conversationId: 'convo123',
+      const conversationId = uuidv4();
+
+      // Create multiple messages in the same conversation
+      const message1 = await saveMessage(mockReq, {
+        messageId: 'msg1',
+        conversationId,
+        text: 'First message',
+        user: 'user123',
       });
-      expect(mockSchema.findOne).toHaveBeenCalledWith({ messageId: 'msg123', user: 'user123' });
-      expect(mockSchema.find).not.toHaveBeenCalled();
-      expect(result).toBeUndefined();
+
+      const message2 = await saveMessage(mockReq, {
+        messageId: 'msg2',
+        conversationId,
+        text: 'Second message',
+        user: 'user123',
+      });
+
+      const message3 = await saveMessage(mockReq, {
+        messageId: 'msg3',
+        conversationId,
+        text: 'Third message',
+        user: 'user123',
+      });
+
+      // Delete messages since message2 (this should only delete messages created AFTER msg2)
+      await deleteMessagesSince(mockReq, {
+        messageId: 'msg2',
+        conversationId,
+      });
+
+      // Verify msg1 and msg2 remain, msg3 is deleted
+      const remainingMessages = await Message.find({ conversationId, user: 'user123' });
+      expect(remainingMessages).toHaveLength(2);
+      expect(remainingMessages.map((m) => m.messageId)).toContain('msg1');
+      expect(remainingMessages.map((m) => m.messageId)).toContain('msg2');
+      expect(remainingMessages.map((m) => m.messageId)).not.toContain('msg3');
     });
 
     it('should return undefined if no message is found', async () => {
-      mockSchema.findOne().lean.mockResolvedValueOnce(null);
       const result = await deleteMessagesSince(mockReq, {
         messageId: 'nonexistent',
         conversationId: 'convo123',
@@ -137,29 +163,71 @@ describe('Message Operations', () => {
 
   describe('getMessages', () => {
     it('should retrieve messages with the correct filter', async () => {
-      const filter = { conversationId: 'convo123' };
-      await getMessages(filter);
-      expect(mockSchema.find).toHaveBeenCalledWith(filter);
-      expect(mockFindQuery.sort).toHaveBeenCalledWith({ createdAt: 1 });
-      expect(mockFindQuery.lean).toHaveBeenCalled();
+      const conversationId = uuidv4();
+
+      // Save some messages
+      await saveMessage(mockReq, {
+        messageId: 'msg1',
+        conversationId,
+        text: 'First message',
+        user: 'user123',
+      });
+
+      await saveMessage(mockReq, {
+        messageId: 'msg2',
+        conversationId,
+        text: 'Second message',
+        user: 'user123',
+      });
+
+      const messages = await getMessages({ conversationId });
+      expect(messages).toHaveLength(2);
+      expect(messages[0].text).toBe('First message');
+      expect(messages[1].text).toBe('Second message');
     });
   });
 
   describe('deleteMessages', () => {
     it('should delete messages with the correct filter', async () => {
+      // Save some messages for different users
+      await saveMessage(mockReq, mockMessageData);
+      await saveMessage(
+        { user: { id: 'user456' } },
+        {
+          messageId: 'msg456',
+          conversationId: uuidv4(),
+          text: 'Other user message',
+          user: 'user456',
+        },
+      );
+
       await deleteMessages({ user: 'user123' });
-      expect(mockSchema.deleteMany).toHaveBeenCalledWith({ user: 'user123' });
+
+      // Verify only user123's messages were deleted
+      const user123Messages = await Message.find({ user: 'user123' });
+      const user456Messages = await Message.find({ user: 'user456' });
+
+      expect(user123Messages).toHaveLength(0);
+      expect(user456Messages).toHaveLength(1);
     });
   });
 
   describe('Conversation Hijacking Prevention', () => {
-    it('should not allow editing a message in another user\'s conversation', async () => {
+    it("should not allow editing a message in another user's conversation", async () => {
       const attackerReq = { user: { id: 'attacker123' } };
-      const victimConversationId = 'victim-convo-123';
+      const victimConversationId = uuidv4();
       const victimMessageId = 'victim-msg-123';
 
-      mockSchema.findOneAndUpdate.mockResolvedValue(null);
+      // First, save a message as the victim (but we'll try to edit as attacker)
+      const victimReq = { user: { id: 'victim123' } };
+      await saveMessage(victimReq, {
+        messageId: victimMessageId,
+        conversationId: victimConversationId,
+        text: 'Victim message',
+        user: 'victim123',
+      });
+
+      // Attacker tries to edit the victim's message
       await expect(
         updateMessage(attackerReq, {
           messageId: victimMessageId,
@@ -168,71 +236,82 @@ describe('Message Operations', () => {
         }),
       ).rejects.toThrow('Message not found or user not authorized.');
 
-      expect(mockSchema.findOneAndUpdate).toHaveBeenCalledWith(
-        { messageId: victimMessageId, user: 'attacker123' },
-        expect.anything(),
-        expect.anything(),
-      );
+      // Verify the original message is unchanged
+      const originalMessage = await Message.findOne({
+        messageId: victimMessageId,
+        user: 'victim123',
+      });
+      expect(originalMessage.text).toBe('Victim message');
     });
 
-    it('should not allow deleting messages from another user\'s conversation', async () => {
+    it("should not allow deleting messages from another user's conversation", async () => {
       const attackerReq = { user: { id: 'attacker123' } };
-      const victimConversationId = 'victim-convo-123';
+      const victimConversationId = uuidv4();
       const victimMessageId = 'victim-msg-123';
 
-      mockSchema.findOne().lean.mockResolvedValueOnce(null); // Simulating message not found for this user
+      // Save a message as the victim
+      const victimReq = { user: { id: 'victim123' } };
+      await saveMessage(victimReq, {
+        messageId: victimMessageId,
+        conversationId: victimConversationId,
+        text: 'Victim message',
+        user: 'victim123',
+      });
+
+      // Attacker tries to delete from victim's conversation
       const result = await deleteMessagesSince(attackerReq, {
         messageId: victimMessageId,
         conversationId: victimConversationId,
       });
 
       expect(result).toBeUndefined();
-      expect(mockSchema.findOne).toHaveBeenCalledWith({
+
+      // Verify the victim's message still exists
+      const victimMessage = await Message.findOne({
         messageId: victimMessageId,
-        user: 'attacker123',
+        user: 'victim123',
       });
+      expect(victimMessage).toBeTruthy();
+      expect(victimMessage.text).toBe('Victim message');
     });
 
-    it('should not allow inserting a new message into another user\'s conversation', async () => {
+    it("should not allow inserting a new message into another user's conversation", async () => {
       const attackerReq = { user: { id: 'attacker123' } };
-      const victimConversationId = uuidv4(); // Use a valid UUID
+      const victimConversationId = uuidv4();
 
-      await expect(
-        saveMessage(attackerReq, {
-          conversationId: victimConversationId,
-          text: 'Inserted malicious message',
-          messageId: 'new-msg-123',
-        }),
-      ).resolves.not.toThrow(); // It should not throw an error
+      // Attacker tries to save a message - this should succeed but with attacker's user ID
+      const result = await saveMessage(attackerReq, {
+        conversationId: victimConversationId,
+        text: 'Inserted malicious message',
+        messageId: 'new-msg-123',
+        user: 'attacker123',
+      });
 
-      // Check that the message was saved with the attacker's user ID
-      expect(mockSchema.findOneAndUpdate).toHaveBeenCalledWith(
-        { messageId: 'new-msg-123', user: 'attacker123' },
-        expect.objectContaining({
-          user: 'attacker123',
-          conversationId: victimConversationId,
-        }),
-        expect.anything(),
-      );
+      expect(result).toBeTruthy();
+      expect(result.user).toBe('attacker123');
+
+      // Verify the message was saved with the attacker's user ID, not as an anonymous message
+      const savedMessage = await Message.findOne({ messageId: 'new-msg-123' });
+      expect(savedMessage.user).toBe('attacker123');
+      expect(savedMessage.conversationId).toBe(victimConversationId);
     });
 
     it('should allow retrieving messages from any conversation', async () => {
-      const victimConversationId = 'victim-convo-123';
+      const victimConversationId = uuidv4();
 
-      await getMessages({ conversationId: victimConversationId });
-
-      expect(mockSchema.find).toHaveBeenCalledWith({
+      // Save a message in the victim's conversation
+      const victimReq = { user: { id: 'victim123' } };
+      await saveMessage(victimReq, {
+        messageId: 'victim-msg',
         conversationId: victimConversationId,
+        text: 'Victim message',
+        user: 'victim123',
       });
 
-      mockSchema.find.mockReturnValueOnce({
-        select: jest.fn().mockReturnThis(),
-        sort: jest.fn().mockReturnThis(),
-        lean: jest.fn().mockResolvedValue([{ text: 'Test message' }]),
-      });
-
-      const result = await getMessages({ conversationId: victimConversationId });
-      expect(result).toEqual([{ text: 'Test message' }]);
+      // Anyone should be able to retrieve messages by conversation ID
+      const messages = await getMessages({ conversationId: victimConversationId });
+      expect(messages).toHaveLength(1);
+      expect(messages[0].text).toBe('Victim message');
     });
   });
 });
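The rewritten tests above exercise the real `Message` model against a live Mongoose connection instead of a mocked schema, so the Jest lifecycle needs a database behind it. A minimal setup sketch, assuming mongodb-memory-server (the repo's actual test-setup file is not part of this diff):

// Illustrative Jest setup for DB-backed model tests (assumption: mongodb-memory-server).
const mongoose = require('mongoose');
const { MongoMemoryServer } = require('mongodb-memory-server');

let mongoServer;

beforeAll(async () => {
  // Spin up a throwaway in-memory MongoDB and point Mongoose at it
  mongoServer = await MongoMemoryServer.create();
  await mongoose.connect(mongoServer.getUri());
});

afterEach(async () => {
  // Isolate tests from each other by wiping all collections between runs
  await mongoose.connection.db.dropDatabase();
});

afterAll(async () => {
  await mongoose.disconnect();
  await mongoServer.stop();
});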
@@ -1,5 +1,5 @@
-const Preset = require('./schema/presetSchema');
-const { logger } = require('~/config');
+const { logger } = require('@librechat/data-schemas');
+const { Preset } = require('~/db/models');
 
 const getPreset = async (user, presetId) => {
   try {
@@ -11,7 +11,6 @@ const getPreset = async (user, presetId) => {
 };
 
 module.exports = {
-  Preset,
   getPreset,
   getPresets: async (user, filter) => {
     try {
@@ -1,8 +1,5 @@
-const { model } = require('mongoose');
 const { GLOBAL_PROJECT_NAME } = require('librechat-data-provider').Constants;
-const { projectSchema } = require('@librechat/data-schemas');
+const { Project } = require('~/db/models');
 
-const Project = model('Project', projectSchema);
-
 /**
  * Retrieve a project by ID and convert the found project document to a plain object.
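The pattern repeated across these files: inline `mongoose.model(...)` compilation is dropped in favor of importing already-compiled models from a central `~/db/models` module. That registry itself is not shown in this diff; a plausible minimal shape, for orientation only:

// Hypothetical sketch of ~/db/models (not part of this diff): compile each
// schema from @librechat/data-schemas exactly once, export the models.
const { model } = require('mongoose');
const { projectSchema, presetSchema } = require('@librechat/data-schemas');

const Project = model('Project', projectSchema);
const Preset = model('Preset', presetSchema);

module.exports = { Project, Preset /* , ...remaining models */ };

Centralizing registration avoids the "OverwriteModelError" class of bugs that inline per-file `mongoose.model` calls can cause when a schema is compiled twice.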
@@ -1,5 +1,5 @@
-const mongoose = require('mongoose');
 const { ObjectId } = require('mongodb');
+const { logger } = require('@librechat/data-schemas');
 const { SystemRoles, SystemCategories, Constants } = require('librechat-data-provider');
 const {
   getProjectByName,
@@ -7,12 +7,8 @@ const {
   removeGroupIdsFromProject,
   removeGroupFromAllProjects,
 } = require('./Project');
-const { promptGroupSchema, promptSchema } = require('@librechat/data-schemas');
+const { PromptGroup, Prompt } = require('~/db/models');
 const { escapeRegExp } = require('~/server/utils');
-const { logger } = require('~/config');
-
-const PromptGroup = mongoose.model('PromptGroup', promptGroupSchema);
-const Prompt = mongoose.model('Prompt', promptSchema);
 
 /**
  * Create a pipeline for the aggregation to get prompt groups
|||||||
@@ -1,4 +1,3 @@
|
|||||||
const mongoose = require('mongoose');
|
|
||||||
const {
|
const {
|
||||||
CacheKeys,
|
CacheKeys,
|
||||||
SystemRoles,
|
SystemRoles,
|
||||||
@@ -7,11 +6,9 @@ const {
|
|||||||
permissionsSchema,
|
permissionsSchema,
|
||||||
removeNullishValues,
|
removeNullishValues,
|
||||||
} = require('librechat-data-provider');
|
} = require('librechat-data-provider');
|
||||||
|
const { logger } = require('@librechat/data-schemas');
|
||||||
const getLogStores = require('~/cache/getLogStores');
|
const getLogStores = require('~/cache/getLogStores');
|
||||||
const { roleSchema } = require('@librechat/data-schemas');
|
const { Role } = require('~/db/models');
|
||||||
const { logger } = require('~/config');
|
|
||||||
|
|
||||||
const Role = mongoose.model('Role', roleSchema);
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Retrieve a role by name and convert the found role document to a plain object.
|
* Retrieve a role by name and convert the found role document to a plain object.
|
||||||
@@ -173,35 +170,6 @@ async function updateAccessPermissions(roleName, permissionsUpdate) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Initialize default roles in the system.
|
|
||||||
* Creates the default roles (ADMIN, USER) if they don't exist in the database.
|
|
||||||
* Updates existing roles with new permission types if they're missing.
|
|
||||||
*
|
|
||||||
* @returns {Promise<void>}
|
|
||||||
*/
|
|
||||||
const initializeRoles = async function () {
|
|
||||||
for (const roleName of [SystemRoles.ADMIN, SystemRoles.USER]) {
|
|
||||||
let role = await Role.findOne({ name: roleName });
|
|
||||||
const defaultPerms = roleDefaults[roleName].permissions;
|
|
||||||
|
|
||||||
if (!role) {
|
|
||||||
// Create new role if it doesn't exist.
|
|
||||||
role = new Role(roleDefaults[roleName]);
|
|
||||||
} else {
|
|
||||||
// Ensure role.permissions is defined.
|
|
||||||
role.permissions = role.permissions || {};
|
|
||||||
// For each permission type in defaults, add it if missing.
|
|
||||||
for (const permType of Object.keys(defaultPerms)) {
|
|
||||||
if (role.permissions[permType] == null) {
|
|
||||||
role.permissions[permType] = defaultPerms[permType];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
await role.save();
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Migrates roles from old schema to new schema structure.
|
* Migrates roles from old schema to new schema structure.
|
||||||
* This can be called directly to fix existing roles.
|
* This can be called directly to fix existing roles.
|
||||||
@@ -282,10 +250,8 @@ const migrateRoleSchema = async function (roleName) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
Role,
|
|
||||||
getRoleByName,
|
getRoleByName,
|
||||||
initializeRoles,
|
|
||||||
updateRoleByName,
|
updateRoleByName,
|
||||||
updateAccessPermissions,
|
|
||||||
migrateRoleSchema,
|
migrateRoleSchema,
|
||||||
|
updateAccessPermissions,
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -6,8 +6,10 @@ const {
   roleDefaults,
   PermissionTypes,
 } = require('librechat-data-provider');
-const { Role, getRoleByName, updateAccessPermissions, initializeRoles } = require('~/models/Role');
+const { getRoleByName, updateAccessPermissions } = require('~/models/Role');
 const getLogStores = require('~/cache/getLogStores');
+const { initializeRoles } = require('~/models');
+const { Role } = require('~/db/models');
 
 // Mock the cache
 jest.mock('~/cache/getLogStores', () =>
@@ -1,275 +0,0 @@
-const mongoose = require('mongoose');
-const signPayload = require('~/server/services/signPayload');
-const { hashToken } = require('~/server/utils/crypto');
-const { sessionSchema } = require('@librechat/data-schemas');
-const { logger } = require('~/config');
-
-const Session = mongoose.model('Session', sessionSchema);
-
-const { REFRESH_TOKEN_EXPIRY } = process.env ?? {};
-const expires = eval(REFRESH_TOKEN_EXPIRY) ?? 1000 * 60 * 60 * 24 * 7; // 7 days default
-
-/**
- * Error class for Session-related errors
- */
-class SessionError extends Error {
-  constructor(message, code = 'SESSION_ERROR') {
-    super(message);
-    this.name = 'SessionError';
-    this.code = code;
-  }
-}
-
-/**
- * Creates a new session for a user
- * @param {string} userId - The ID of the user
- * @param {Object} options - Additional options for session creation
- * @param {Date} options.expiration - Custom expiration date
- * @returns {Promise<{session: Session, refreshToken: string}>}
- * @throws {SessionError}
- */
-const createSession = async (userId, options = {}) => {
-  if (!userId) {
-    throw new SessionError('User ID is required', 'INVALID_USER_ID');
-  }
-
-  try {
-    const session = new Session({
-      user: userId,
-      expiration: options.expiration || new Date(Date.now() + expires),
-    });
-    const refreshToken = await generateRefreshToken(session);
-    return { session, refreshToken };
-  } catch (error) {
-    logger.error('[createSession] Error creating session:', error);
-    throw new SessionError('Failed to create session', 'CREATE_SESSION_FAILED');
-  }
-};
-
-/**
- * Finds a session by various parameters
- * @param {Object} params - Search parameters
- * @param {string} [params.refreshToken] - The refresh token to search by
- * @param {string} [params.userId] - The user ID to search by
- * @param {string} [params.sessionId] - The session ID to search by
- * @param {Object} [options] - Additional options
- * @param {boolean} [options.lean=true] - Whether to return plain objects instead of documents
- * @returns {Promise<Session|null>}
- * @throws {SessionError}
- */
-const findSession = async (params, options = { lean: true }) => {
-  try {
-    const query = {};
-
-    if (!params.refreshToken && !params.userId && !params.sessionId) {
-      throw new SessionError('At least one search parameter is required', 'INVALID_SEARCH_PARAMS');
-    }
-
-    if (params.refreshToken) {
-      const tokenHash = await hashToken(params.refreshToken);
-      query.refreshTokenHash = tokenHash;
-    }
-
-    if (params.userId) {
-      query.user = params.userId;
-    }
-
-    if (params.sessionId) {
-      const sessionId = params.sessionId.sessionId || params.sessionId;
-      if (!mongoose.Types.ObjectId.isValid(sessionId)) {
-        throw new SessionError('Invalid session ID format', 'INVALID_SESSION_ID');
-      }
-      query._id = sessionId;
-    }
-
-    // Add expiration check to only return valid sessions
-    query.expiration = { $gt: new Date() };
-
-    const sessionQuery = Session.findOne(query);
-
-    if (options.lean) {
-      return await sessionQuery.lean();
-    }
-
-    return await sessionQuery.exec();
-  } catch (error) {
-    logger.error('[findSession] Error finding session:', error);
-    throw new SessionError('Failed to find session', 'FIND_SESSION_FAILED');
-  }
-};
-
-/**
- * Updates session expiration
- * @param {Session|string} session - The session or session ID to update
- * @param {Date} [newExpiration] - Optional new expiration date
- * @returns {Promise<Session>}
- * @throws {SessionError}
- */
-const updateExpiration = async (session, newExpiration) => {
-  try {
-    const sessionDoc = typeof session === 'string' ? await Session.findById(session) : session;
-
-    if (!sessionDoc) {
-      throw new SessionError('Session not found', 'SESSION_NOT_FOUND');
-    }
-
-    sessionDoc.expiration = newExpiration || new Date(Date.now() + expires);
-    return await sessionDoc.save();
-  } catch (error) {
-    logger.error('[updateExpiration] Error updating session:', error);
-    throw new SessionError('Failed to update session expiration', 'UPDATE_EXPIRATION_FAILED');
-  }
-};
-
-/**
- * Deletes a session by refresh token or session ID
- * @param {Object} params - Delete parameters
- * @param {string} [params.refreshToken] - The refresh token of the session to delete
- * @param {string} [params.sessionId] - The ID of the session to delete
- * @returns {Promise<Object>}
- * @throws {SessionError}
- */
-const deleteSession = async (params) => {
-  try {
-    if (!params.refreshToken && !params.sessionId) {
-      throw new SessionError(
-        'Either refreshToken or sessionId is required',
-        'INVALID_DELETE_PARAMS',
-      );
-    }
-
-    const query = {};
-
-    if (params.refreshToken) {
-      query.refreshTokenHash = await hashToken(params.refreshToken);
-    }
-
-    if (params.sessionId) {
-      query._id = params.sessionId;
-    }
-
-    const result = await Session.deleteOne(query);
-
-    if (result.deletedCount === 0) {
-      logger.warn('[deleteSession] No session found to delete');
-    }
-
-    return result;
-  } catch (error) {
-    logger.error('[deleteSession] Error deleting session:', error);
-    throw new SessionError('Failed to delete session', 'DELETE_SESSION_FAILED');
-  }
-};
-
-/**
- * Deletes all sessions for a user
- * @param {string} userId - The ID of the user
- * @param {Object} [options] - Additional options
- * @param {boolean} [options.excludeCurrentSession] - Whether to exclude the current session
- * @param {string} [options.currentSessionId] - The ID of the current session to exclude
- * @returns {Promise<Object>}
- * @throws {SessionError}
- */
-const deleteAllUserSessions = async (userId, options = {}) => {
-  try {
-    if (!userId) {
-      throw new SessionError('User ID is required', 'INVALID_USER_ID');
-    }
-
-    // Extract userId if it's passed as an object
-    const userIdString = userId.userId || userId;
-
-    if (!mongoose.Types.ObjectId.isValid(userIdString)) {
-      throw new SessionError('Invalid user ID format', 'INVALID_USER_ID_FORMAT');
-    }
-
-    const query = { user: userIdString };
-
-    if (options.excludeCurrentSession && options.currentSessionId) {
-      query._id = { $ne: options.currentSessionId };
-    }
-
-    const result = await Session.deleteMany(query);
-
-    if (result.deletedCount > 0) {
-      logger.debug(
-        `[deleteAllUserSessions] Deleted ${result.deletedCount} sessions for user ${userIdString}.`,
-      );
-    }
-
-    return result;
-  } catch (error) {
-    logger.error('[deleteAllUserSessions] Error deleting user sessions:', error);
-    throw new SessionError('Failed to delete user sessions', 'DELETE_ALL_SESSIONS_FAILED');
-  }
-};
-
-/**
- * Generates a refresh token for a session
- * @param {Session} session - The session to generate a token for
- * @returns {Promise<string>}
- * @throws {SessionError}
- */
-const generateRefreshToken = async (session) => {
-  if (!session || !session.user) {
-    throw new SessionError('Invalid session object', 'INVALID_SESSION');
-  }
-
-  try {
-    const expiresIn = session.expiration ? session.expiration.getTime() : Date.now() + expires;
-
-    if (!session.expiration) {
-      session.expiration = new Date(expiresIn);
-    }
-
-    const refreshToken = await signPayload({
-      payload: {
-        id: session.user,
-        sessionId: session._id,
-      },
-      secret: process.env.JWT_REFRESH_SECRET,
-      expirationTime: Math.floor((expiresIn - Date.now()) / 1000),
-    });
-
-    session.refreshTokenHash = await hashToken(refreshToken);
-    await session.save();
-
-    return refreshToken;
-  } catch (error) {
-    logger.error('[generateRefreshToken] Error generating refresh token:', error);
-    throw new SessionError('Failed to generate refresh token', 'GENERATE_TOKEN_FAILED');
-  }
-};
-
-/**
- * Counts active sessions for a user
- * @param {string} userId - The ID of the user
- * @returns {Promise<number>}
- * @throws {SessionError}
- */
-const countActiveSessions = async (userId) => {
-  try {
-    if (!userId) {
-      throw new SessionError('User ID is required', 'INVALID_USER_ID');
-    }
-
-    return await Session.countDocuments({
-      user: userId,
-      expiration: { $gt: new Date() },
-    });
-  } catch (error) {
-    logger.error('[countActiveSessions] Error counting active sessions:', error);
-    throw new SessionError('Failed to count active sessions', 'COUNT_SESSIONS_FAILED');
-  }
-};
-
-module.exports = {
-  createSession,
-  findSession,
-  updateExpiration,
-  deleteSession,
-  deleteAllUserSessions,
-  generateRefreshToken,
-  countActiveSessions,
-  SessionError,
-};
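This commit removes the standalone Session module wholesale; where its responsibilities land is not visible in this diff. For reference, the security-relevant idiom the deleted file implemented is: sign a JWT refresh token, store only its hash, and hand the raw token to the client exactly once. Distilled into a sketch using the same helpers the deleted file required:

// Illustrative distillation of the deleted generateRefreshToken logic;
// rotateRefreshToken is a hypothetical name, not an API in the repo.
const signPayload = require('~/server/services/signPayload');
const { hashToken } = require('~/server/utils/crypto');

async function rotateRefreshToken(session) {
  const refreshToken = await signPayload({
    payload: { id: session.user, sessionId: session._id },
    secret: process.env.JWT_REFRESH_SECRET,
    expirationTime: Math.floor((session.expiration.getTime() - Date.now()) / 1000),
  });
  session.refreshTokenHash = await hashToken(refreshToken); // never persist the raw token
  await session.save();
  return refreshToken; // raw token leaves the server only in this response
}

Lookup then works by re-hashing the presented token and matching refreshTokenHash, as findSession did above.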
@@ -1,351 +0,0 @@
-const mongoose = require('mongoose');
-const { nanoid } = require('nanoid');
-const { Constants } = require('librechat-data-provider');
-const { Conversation } = require('~/models/Conversation');
-const { shareSchema } = require('@librechat/data-schemas');
-const SharedLink = mongoose.model('SharedLink', shareSchema);
-const { getMessages } = require('./Message');
-const logger = require('~/config/winston');
-
-class ShareServiceError extends Error {
-  constructor(message, code) {
-    super(message);
-    this.name = 'ShareServiceError';
-    this.code = code;
-  }
-}
-
-const memoizedAnonymizeId = (prefix) => {
-  const memo = new Map();
-  return (id) => {
-    if (!memo.has(id)) {
-      memo.set(id, `${prefix}_${nanoid()}`);
-    }
-    return memo.get(id);
-  };
-};
-
-const anonymizeConvoId = memoizedAnonymizeId('convo');
-const anonymizeAssistantId = memoizedAnonymizeId('a');
-const anonymizeMessageId = (id) =>
-  id === Constants.NO_PARENT ? id : memoizedAnonymizeId('msg')(id);
-
-function anonymizeConvo(conversation) {
-  if (!conversation) {
-    return null;
-  }
-
-  const newConvo = { ...conversation };
-  if (newConvo.assistant_id) {
-    newConvo.assistant_id = anonymizeAssistantId(newConvo.assistant_id);
-  }
-  return newConvo;
-}
-
-function anonymizeMessages(messages, newConvoId) {
-  if (!Array.isArray(messages)) {
-    return [];
-  }
-
-  const idMap = new Map();
-  return messages.map((message) => {
-    const newMessageId = anonymizeMessageId(message.messageId);
-    idMap.set(message.messageId, newMessageId);
-
-    const anonymizedAttachments = message.attachments?.map((attachment) => {
-      return {
-        ...attachment,
-        messageId: newMessageId,
-        conversationId: newConvoId,
-      };
-    });
-
-    return {
-      ...message,
-      messageId: newMessageId,
-      parentMessageId:
-        idMap.get(message.parentMessageId) || anonymizeMessageId(message.parentMessageId),
-      conversationId: newConvoId,
-      model: message.model?.startsWith('asst_')
-        ? anonymizeAssistantId(message.model)
-        : message.model,
-      attachments: anonymizedAttachments,
-    };
-  });
-}
-
-async function getSharedMessages(shareId) {
-  try {
-    const share = await SharedLink.findOne({ shareId, isPublic: true })
-      .populate({
-        path: 'messages',
-        select: '-_id -__v -user',
-      })
-      .select('-_id -__v -user')
-      .lean();
-
-    if (!share?.conversationId || !share.isPublic) {
-      return null;
-    }
-
-    const newConvoId = anonymizeConvoId(share.conversationId);
-    const result = {
-      ...share,
-      conversationId: newConvoId,
-      messages: anonymizeMessages(share.messages, newConvoId),
-    };
-
-    return result;
-  } catch (error) {
-    logger.error('[getShare] Error getting share link', {
-      error: error.message,
-      shareId,
-    });
-    throw new ShareServiceError('Error getting share link', 'SHARE_FETCH_ERROR');
-  }
-}
-
-async function getSharedLinks(user, pageParam, pageSize, isPublic, sortBy, sortDirection, search) {
-  try {
-    const query = { user, isPublic };
-
-    if (pageParam) {
-      if (sortDirection === 'desc') {
-        query[sortBy] = { $lt: pageParam };
-      } else {
-        query[sortBy] = { $gt: pageParam };
-      }
-    }
-
-    if (search && search.trim()) {
-      try {
-        const searchResults = await Conversation.meiliSearch(search);
-
-        if (!searchResults?.hits?.length) {
-          return {
-            links: [],
-            nextCursor: undefined,
-            hasNextPage: false,
-          };
-        }
-
-        const conversationIds = searchResults.hits.map((hit) => hit.conversationId);
-        query['conversationId'] = { $in: conversationIds };
-      } catch (searchError) {
-        logger.error('[getSharedLinks] Meilisearch error', {
-          error: searchError.message,
-          user,
-        });
-        return {
-          links: [],
-          nextCursor: undefined,
-          hasNextPage: false,
-        };
-      }
-    }
-
-    const sort = {};
-    sort[sortBy] = sortDirection === 'desc' ? -1 : 1;
-
-    if (Array.isArray(query.conversationId)) {
-      query.conversationId = { $in: query.conversationId };
-    }
-
-    const sharedLinks = await SharedLink.find(query)
-      .sort(sort)
-      .limit(pageSize + 1)
-      .select('-__v -user')
-      .lean();
-
-    const hasNextPage = sharedLinks.length > pageSize;
-    const links = sharedLinks.slice(0, pageSize);
-
-    const nextCursor = hasNextPage ? links[links.length - 1][sortBy] : undefined;
-
-    return {
-      links: links.map((link) => ({
-        shareId: link.shareId,
-        title: link?.title || 'Untitled',
-        isPublic: link.isPublic,
-        createdAt: link.createdAt,
-        conversationId: link.conversationId,
-      })),
-      nextCursor,
-      hasNextPage,
-    };
-  } catch (error) {
-    logger.error('[getSharedLinks] Error getting shares', {
-      error: error.message,
-      user,
-    });
-    throw new ShareServiceError('Error getting shares', 'SHARES_FETCH_ERROR');
-  }
-}
-
-async function deleteAllSharedLinks(user) {
-  try {
-    const result = await SharedLink.deleteMany({ user });
-    return {
-      message: 'All shared links deleted successfully',
-      deletedCount: result.deletedCount,
-    };
-  } catch (error) {
-    logger.error('[deleteAllSharedLinks] Error deleting shared links', {
-      error: error.message,
-      user,
-    });
-    throw new ShareServiceError('Error deleting shared links', 'BULK_DELETE_ERROR');
-  }
-}
-
-async function createSharedLink(user, conversationId) {
-  if (!user || !conversationId) {
-    throw new ShareServiceError('Missing required parameters', 'INVALID_PARAMS');
-  }
-
-  try {
-    const [existingShare, conversationMessages] = await Promise.all([
-      SharedLink.findOne({ conversationId, isPublic: true }).select('-_id -__v -user').lean(),
-      getMessages({ conversationId }),
-    ]);
-
-    if (existingShare && existingShare.isPublic) {
-      throw new ShareServiceError('Share already exists', 'SHARE_EXISTS');
-    } else if (existingShare) {
-      await SharedLink.deleteOne({ conversationId });
-    }
-
-    const conversation = await Conversation.findOne({ conversationId }).lean();
-    const title = conversation?.title || 'Untitled';
-
-    const shareId = nanoid();
-    await SharedLink.create({
-      shareId,
-      conversationId,
-      messages: conversationMessages,
-      title,
-      user,
-    });
-
-    return { shareId, conversationId };
-  } catch (error) {
-    logger.error('[createSharedLink] Error creating shared link', {
-      error: error.message,
-      user,
-      conversationId,
-    });
-    throw new ShareServiceError('Error creating shared link', 'SHARE_CREATE_ERROR');
-  }
-}
-
-async function getSharedLink(user, conversationId) {
-  if (!user || !conversationId) {
-    throw new ShareServiceError('Missing required parameters', 'INVALID_PARAMS');
-  }
-
-  try {
-    const share = await SharedLink.findOne({ conversationId, user, isPublic: true })
-      .select('shareId -_id')
-      .lean();
-
-    if (!share) {
-      return { shareId: null, success: false };
-    }
-
-    return { shareId: share.shareId, success: true };
-  } catch (error) {
-    logger.error('[getSharedLink] Error getting shared link', {
-      error: error.message,
-      user,
-      conversationId,
-    });
-    throw new ShareServiceError('Error getting shared link', 'SHARE_FETCH_ERROR');
-  }
-}
-
-async function updateSharedLink(user, shareId) {
-  if (!user || !shareId) {
-    throw new ShareServiceError('Missing required parameters', 'INVALID_PARAMS');
-  }
-
-  try {
-    const share = await SharedLink.findOne({ shareId }).select('-_id -__v -user').lean();
-
-    if (!share) {
-      throw new ShareServiceError('Share not found', 'SHARE_NOT_FOUND');
-    }
-
-    const [updatedMessages] = await Promise.all([
-      getMessages({ conversationId: share.conversationId }),
-    ]);
-
-    const newShareId = nanoid();
-    const update = {
-      messages: updatedMessages,
-      user,
-      shareId: newShareId,
-    };
-
-    const updatedShare = await SharedLink.findOneAndUpdate({ shareId, user }, update, {
-      new: true,
-      upsert: false,
-      runValidators: true,
-    }).lean();
-
-    if (!updatedShare) {
-      throw new ShareServiceError('Share update failed', 'SHARE_UPDATE_ERROR');
-    }
-
-    anonymizeConvo(updatedShare);
-
-    return { shareId: newShareId, conversationId: updatedShare.conversationId };
-  } catch (error) {
-    logger.error('[updateSharedLink] Error updating shared link', {
-      error: error.message,
-      user,
-      shareId,
-    });
-    throw new ShareServiceError(
-      error.code === 'SHARE_UPDATE_ERROR' ? error.message : 'Error updating shared link',
-      error.code || 'SHARE_UPDATE_ERROR',
-    );
-  }
-}
-
-async function deleteSharedLink(user, shareId) {
-  if (!user || !shareId) {
-    throw new ShareServiceError('Missing required parameters', 'INVALID_PARAMS');
-  }
-
-  try {
-    const result = await SharedLink.findOneAndDelete({ shareId, user }).lean();
-
-    if (!result) {
-      return null;
-    }
-
-    return {
-      success: true,
-      shareId,
-      message: 'Share deleted successfully',
-    };
-  } catch (error) {
-    logger.error('[deleteSharedLink] Error deleting shared link', {
-      error: error.message,
-      user,
-      shareId,
-    });
-    throw new ShareServiceError('Error deleting shared link', 'SHARE_DELETE_ERROR');
-  }
-}
-
-module.exports = {
-  SharedLink,
-  getSharedLink,
-  getSharedLinks,
-  createSharedLink,
-  updateSharedLink,
-  deleteSharedLink,
-  getSharedMessages,
-  deleteAllSharedLinks,
-};
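Worth noting before this file disappears: getSharedLinks above uses keyset (cursor) pagination rather than offset paging — it fetches one extra row to learn whether another page exists and uses the sort field of the last returned row as the next cursor. The idiom in isolation, as a generic hedged sketch (the function name and signature here are illustrative, not repo API):

// Minimal keyset-pagination sketch over any Mongoose model.
async function paginate(Model, query, { sortBy, sortDirection, pageSize, cursor }) {
  if (cursor) {
    // Resume strictly after the last row the client saw
    query[sortBy] = sortDirection === 'desc' ? { $lt: cursor } : { $gt: cursor };
  }
  const rows = await Model.find(query)
    .sort({ [sortBy]: sortDirection === 'desc' ? -1 : 1 })
    .limit(pageSize + 1) // fetch one extra row as a "has next page" probe
    .lean();
  const hasNextPage = rows.length > pageSize;
  const page = rows.slice(0, pageSize);
  const nextCursor = hasNextPage ? page[page.length - 1][sortBy] : undefined;
  return { page, hasNextPage, nextCursor };
}

Unlike skip/limit, this stays O(page) as the collection grows and is stable under concurrent inserts.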
@@ -1,199 +0,0 @@
-const mongoose = require('mongoose');
-const { encryptV2 } = require('~/server/utils/crypto');
-const { tokenSchema } = require('@librechat/data-schemas');
-const { logger } = require('~/config');
-
-/**
- * Token model.
- * @type {mongoose.Model}
- */
-const Token = mongoose.model('Token', tokenSchema);
-/**
- * Fixes the indexes for the Token collection from legacy TTL indexes to the new expiresAt index.
- */
-async function fixIndexes() {
-  try {
-    if (
-      process.env.NODE_ENV === 'CI' ||
-      process.env.NODE_ENV === 'development' ||
-      process.env.NODE_ENV === 'test'
-    ) {
-      return;
-    }
-    const indexes = await Token.collection.indexes();
-    logger.debug('Existing Token Indexes:', JSON.stringify(indexes, null, 2));
-    const unwantedTTLIndexes = indexes.filter(
-      (index) => index.key.createdAt === 1 && index.expireAfterSeconds !== undefined,
-    );
-    if (unwantedTTLIndexes.length === 0) {
-      logger.debug('No unwanted Token indexes found.');
-      return;
-    }
-    for (const index of unwantedTTLIndexes) {
-      logger.debug(`Dropping unwanted Token index: ${index.name}`);
-      await Token.collection.dropIndex(index.name);
-      logger.debug(`Dropped Token index: ${index.name}`);
-    }
-    logger.debug('Token index cleanup completed successfully.');
-  } catch (error) {
-    logger.error('An error occurred while fixing Token indexes:', error);
-  }
-}
-
-fixIndexes();
-
-/**
- * Creates a new Token instance.
- * @param {Object} tokenData - The data for the new Token.
- * @param {mongoose.Types.ObjectId} tokenData.userId - The user's ID. It is required.
- * @param {String} tokenData.email - The user's email.
- * @param {String} tokenData.token - The token. It is required.
- * @param {Number} tokenData.expiresIn - The number of seconds until the token expires.
- * @returns {Promise<mongoose.Document>} The new Token instance.
- * @throws Will throw an error if token creation fails.
- */
-async function createToken(tokenData) {
-  try {
-    const currentTime = new Date();
-    const expiresAt = new Date(currentTime.getTime() + tokenData.expiresIn * 1000);
-
-    const newTokenData = {
-      ...tokenData,
-      createdAt: currentTime,
-      expiresAt,
-    };
-
-    return await Token.create(newTokenData);
-  } catch (error) {
-    logger.debug('An error occurred while creating token:', error);
-    throw error;
-  }
-}
-
-/**
- * Finds a Token document that matches the provided query.
- * @param {Object} query - The query to match against.
- * @param {mongoose.Types.ObjectId|String} query.userId - The ID of the user.
- * @param {String} query.token - The token value.
- * @param {String} [query.email] - The email of the user.
- * @param {String} [query.identifier] - Unique, alternative identifier for the token.
- * @returns {Promise<Object|null>} The matched Token document, or null if not found.
- * @throws Will throw an error if the find operation fails.
- */
-async function findToken(query) {
-  try {
-    const conditions = [];
-
-    if (query.userId) {
-      conditions.push({ userId: query.userId });
-    }
-    if (query.token) {
-      conditions.push({ token: query.token });
-    }
-    if (query.email) {
-      conditions.push({ email: query.email });
-    }
-    if (query.identifier) {
-      conditions.push({ identifier: query.identifier });
-    }
-
-    const token = await Token.findOne({
-      $and: conditions,
-    }).lean();
-
-    return token;
-  } catch (error) {
-    logger.debug('An error occurred while finding token:', error);
-    throw error;
-  }
-}
-
-/**
- * Updates a Token document that matches the provided query.
- * @param {Object} query - The query to match against.
- * @param {mongoose.Types.ObjectId|String} query.userId - The ID of the user.
- * @param {String} query.token - The token value.
- * @param {String} [query.email] - The email of the user.
- * @param {String} [query.identifier] - Unique, alternative identifier for the token.
- * @param {Object} updateData - The data to update the Token with.
- * @returns {Promise<mongoose.Document|null>} The updated Token document, or null if not found.
- * @throws Will throw an error if the update operation fails.
- */
-async function updateToken(query, updateData) {
-  try {
-    return await Token.findOneAndUpdate(query, updateData, { new: true });
-  } catch (error) {
-    logger.debug('An error occurred while updating token:', error);
-    throw error;
-  }
-}
-
-/**
- * Deletes all Token documents that match the provided token, user ID, or email.
- * @param {Object} query - The query to match against.
- * @param {mongoose.Types.ObjectId|String} query.userId - The ID of the user.
- * @param {String} query.token - The token value.
- * @param {String} [query.email] - The email of the user.
- * @param {String} [query.identifier] - Unique, alternative identifier for the token.
- * @returns {Promise<Object>} The result of the delete operation.
- * @throws Will throw an error if the delete operation fails.
- */
-async function deleteTokens(query) {
-  try {
-    return await Token.deleteMany({
-      $or: [
-        { userId: query.userId },
-        { token: query.token },
-        { email: query.email },
-        { identifier: query.identifier },
-      ],
-    });
-  } catch (error) {
-    logger.debug('An error occurred while deleting tokens:', error);
-    throw error;
-  }
-}
-
-/**
- * Handles the OAuth token by creating or updating the token.
- * @param {object} fields
- * @param {string} fields.userId - The user's ID.
- * @param {string} fields.token - The full token to store.
- * @param {string} fields.identifier - Unique, alternative identifier for the token.
- * @param {number} fields.expiresIn - The number of seconds until the token expires.
- * @param {object} fields.metadata - Additional metadata to store with the token.
- * @param {string} [fields.type="oauth"] - The type of token. Default is 'oauth'.
- */
-async function handleOAuthToken({
-  token,
-  userId,
-  identifier,
-  expiresIn,
-  metadata,
-  type = 'oauth',
-}) {
-  const encrypedToken = await encryptV2(token);
-  const tokenData = {
-    type,
-    userId,
-    metadata,
-    identifier,
-    token: encrypedToken,
-    expiresIn: parseInt(expiresIn, 10) || 3600,
-  };
-
-  const existingToken = await findToken({ userId, identifier });
-  if (existingToken) {
-    return await updateToken({ identifier }, tokenData);
-  } else {
-    return await createToken(tokenData);
-  }
-}
-
-module.exports = {
-  findToken,
-  createToken,
-  updateToken,
-  deleteTokens,
-  handleOAuthToken,
-};
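The fixIndexes function above drops legacy `createdAt`-based TTL indexes in favor of an explicit `expiresAt` field, which lets each document carry its own lifetime. For reference, the target indexing pattern looks roughly like this (illustrative only; the real tokenSchema lives in @librechat/data-schemas):

// Sketch of a per-document TTL: MongoDB removes each document once its own
// expiresAt date has passed, instead of createdAt + a fixed expireAfterSeconds.
const { Schema } = require('mongoose');

const exampleTokenSchema = new Schema({
  userId: { type: Schema.Types.ObjectId, required: true },
  token: { type: String, required: true },
  expiresAt: { type: Date, required: true },
});

// expireAfterSeconds: 0 means "expire exactly at the indexed date"
exampleTokenSchema.index({ expiresAt: 1 }, { expireAfterSeconds: 0 });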
@@ -1,6 +1,4 @@
-const mongoose = require('mongoose');
-const { toolCallSchema } = require('@librechat/data-schemas');
-
-const ToolCall = mongoose.model('ToolCall', toolCallSchema);
+const { ToolCall } = require('~/db/models');
 
 /**
  * Create a new tool call
@@ -1,9 +1,7 @@
-const mongoose = require('mongoose');
-const { transactionSchema } = require('@librechat/data-schemas');
+const { logger } = require('@librechat/data-schemas');
 const { getBalanceConfig } = require('~/server/services/Config');
 const { getMultiplier, getCacheMultiplier } = require('./tx');
-const { logger } = require('~/config');
-const Balance = require('./Balance');
+const { Transaction, Balance } = require('~/db/models');
 
 const cancelRate = 1.15;
 
@@ -140,19 +138,19 @@ const updateBalance = async ({ user, incrementValue, setValues }) => {
 };
 
 /** Method to calculate and set the tokenValue for a transaction */
-transactionSchema.methods.calculateTokenValue = function () {
-  if (!this.valueKey || !this.tokenType) {
-    this.tokenValue = this.rawAmount;
+function calculateTokenValue(txn) {
+  if (!txn.valueKey || !txn.tokenType) {
+    txn.tokenValue = txn.rawAmount;
   }
-  const { valueKey, tokenType, model, endpointTokenConfig } = this;
+  const { valueKey, tokenType, model, endpointTokenConfig } = txn;
   const multiplier = Math.abs(getMultiplier({ valueKey, tokenType, model, endpointTokenConfig }));
-  this.rate = multiplier;
-  this.tokenValue = this.rawAmount * multiplier;
-  if (this.context && this.tokenType === 'completion' && this.context === 'incomplete') {
-    this.tokenValue = Math.ceil(this.tokenValue * cancelRate);
-    this.rate *= cancelRate;
+  txn.rate = multiplier;
+  txn.tokenValue = txn.rawAmount * multiplier;
+  if (txn.context && txn.tokenType === 'completion' && txn.context === 'incomplete') {
+    txn.tokenValue = Math.ceil(txn.tokenValue * cancelRate);
+    txn.rate *= cancelRate;
   }
-};
+}
 
 /**
  * New static method to create an auto-refill transaction that does NOT trigger a balance update.
@@ -163,13 +161,13 @@ transactionSchema.methods.calculateTokenValue = function () {
  * @param {number} txData.rawAmount - The raw amount of tokens.
  * @returns {Promise<object>} - The created transaction.
  */
-transactionSchema.statics.createAutoRefillTransaction = async function (txData) {
+async function createAutoRefillTransaction(txData) {
   if (txData.rawAmount != null && isNaN(txData.rawAmount)) {
     return;
   }
-  const transaction = new this(txData);
+  const transaction = new Transaction(txData);
   transaction.endpointTokenConfig = txData.endpointTokenConfig;
-  transaction.calculateTokenValue();
+  calculateTokenValue(transaction);
   await transaction.save();
 
   const balanceResponse = await updateBalance({
@@ -185,21 +183,20 @@ transactionSchema.statics.createAutoRefillTransaction = async function (txData)
   logger.debug('[Balance.check] Auto-refill performed', result);
   result.transaction = transaction;
   return result;
-};
+}
 
 /**
  * Static method to create a transaction and update the balance
  * @param {txData} txData - Transaction data.
  */
-transactionSchema.statics.create = async function (txData) {
-  const Transaction = this;
+async function createTransaction(txData) {
   if (txData.rawAmount != null && isNaN(txData.rawAmount)) {
     return;
   }
 
   const transaction = new Transaction(txData);
   transaction.endpointTokenConfig = txData.endpointTokenConfig;
-  transaction.calculateTokenValue();
+  calculateTokenValue(transaction);
 
   await transaction.save();
 
@@ -209,7 +206,6 @@ transactionSchema.statics.create = async function (txData) {
   }
 
   let incrementValue = transaction.tokenValue;
-
   const balanceResponse = await updateBalance({
     user: transaction.user,
     incrementValue,
@@ -221,21 +217,19 @@ transactionSchema.statics.create = async function (txData) {
     balance: balanceResponse.tokenCredits,
     [transaction.tokenType]: incrementValue,
   };
-};
+}
 
 /**
  * Static method to create a structured transaction and update the balance
  * @param {txData} txData - Transaction data.
 */
-transactionSchema.statics.createStructured = async function (txData) {
-  const Transaction = this;
-
+async function createStructuredTransaction(txData) {
   const transaction = new Transaction({
     ...txData,
     endpointTokenConfig: txData.endpointTokenConfig,
   });
 
-  transaction.calculateStructuredTokenValue();
+  calculateStructuredTokenValue(transaction);
 
   await transaction.save();
 
@@ -257,71 +251,69 @@ transactionSchema.statics.createStructured = async function (txData) {
     balance: balanceResponse.tokenCredits,
     [transaction.tokenType]: incrementValue,
   };
-};
+}
 
 /** Method to calculate token value for structured tokens */
-transactionSchema.methods.calculateStructuredTokenValue = function () {
-  if (!this.tokenType) {
-    this.tokenValue = this.rawAmount;
+function calculateStructuredTokenValue(txn) {
+  if (!txn.tokenType) {
+    txn.tokenValue = txn.rawAmount;
     return;
   }
 
-  const { model, endpointTokenConfig } = this;
+  const { model, endpointTokenConfig } = txn;
 
-  if (this.tokenType === 'prompt') {
+  if (txn.tokenType === 'prompt') {
     const inputMultiplier = getMultiplier({ tokenType: 'prompt', model, endpointTokenConfig });
     const writeMultiplier =
       getCacheMultiplier({ cacheType: 'write', model, endpointTokenConfig }) ?? inputMultiplier;
     const readMultiplier =
      getCacheMultiplier({ cacheType: 'read', model, endpointTokenConfig }) ?? inputMultiplier;
 
-    this.rateDetail = {
+    txn.rateDetail = {
      input: inputMultiplier,
      write: writeMultiplier,
      read: readMultiplier,
    };
 
    const totalPromptTokens =
-      Math.abs(this.inputTokens || 0) +
-      Math.abs(this.writeTokens || 0) +
-      Math.abs(this.readTokens || 0);
+      Math.abs(txn.inputTokens || 0) +
+      Math.abs(txn.writeTokens || 0) +
+      Math.abs(txn.readTokens || 0);
 
    if (totalPromptTokens > 0) {
-      this.rate =
-        (Math.abs(inputMultiplier * (this.inputTokens || 0)) +
-          Math.abs(writeMultiplier * (this.writeTokens || 0)) +
-          Math.abs(readMultiplier * (this.readTokens || 0))) /
+      txn.rate =
+        (Math.abs(inputMultiplier * (txn.inputTokens || 0)) +
+          Math.abs(writeMultiplier * (txn.writeTokens || 0)) +
+          Math.abs(readMultiplier * (txn.readTokens || 0))) /
        totalPromptTokens;
    } else {
-      this.rate = Math.abs(inputMultiplier); // Default to input rate if no tokens
+      txn.rate = Math.abs(inputMultiplier); // Default to input rate if no tokens
    }
 
-    this.tokenValue = -(
-      Math.abs(this.inputTokens || 0) * inputMultiplier +
-      Math.abs(this.writeTokens || 0) * writeMultiplier +
-      Math.abs(this.readTokens || 0) * readMultiplier
+    txn.tokenValue = -(
+      Math.abs(txn.inputTokens || 0) * inputMultiplier +
+      Math.abs(txn.writeTokens || 0) * writeMultiplier +
+      Math.abs(txn.readTokens || 0) * readMultiplier
    );
 
-    this.rawAmount = -totalPromptTokens;
-  } else if (this.tokenType === 'completion') {
-    const multiplier = getMultiplier({ tokenType: this.tokenType, model, endpointTokenConfig });
-    this.rate = Math.abs(multiplier);
-    this.tokenValue = -Math.abs(this.rawAmount) * multiplier;
-    this.rawAmount = -Math.abs(this.rawAmount);
+    txn.rawAmount = -totalPromptTokens;
+  } else if (txn.tokenType === 'completion') {
+    const multiplier = getMultiplier({ tokenType: txn.tokenType, model, endpointTokenConfig });
+    txn.rate = Math.abs(multiplier);
+    txn.tokenValue = -Math.abs(txn.rawAmount) * multiplier;
+    txn.rawAmount = -Math.abs(txn.rawAmount);
  }
 
-  if (this.context && this.tokenType === 'completion' && this.context === 'incomplete') {
-    this.tokenValue = Math.ceil(this.tokenValue * cancelRate);
-    this.rate *= cancelRate;
-    if (this.rateDetail) {
-      this.rateDetail = Object.fromEntries(
-        Object.entries(this.rateDetail).map(([k, v]) => [k, v * cancelRate]),
+  if (txn.context && txn.tokenType === 'completion' && txn.context === 'incomplete') {
+    txn.tokenValue = Math.ceil(txn.tokenValue * cancelRate);
+    txn.rate *= cancelRate;
+    if (txn.rateDetail) {
+      txn.rateDetail = Object.fromEntries(
+        Object.entries(txn.rateDetail).map(([k, v]) => [k, v * cancelRate]),
|
Object.entries(txn.rateDetail).map(([k, v]) => [k, v * cancelRate]),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
}
|
||||||
|
|
||||||
const Transaction = mongoose.model('Transaction', transactionSchema);
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Queries and retrieves transactions based on a given filter.
|
* Queries and retrieves transactions based on a given filter.
|
||||||
@@ -340,4 +332,9 @@ async function getTransactions(filter) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = { Transaction, getTransactions };
|
module.exports = {
|
||||||
|
getTransactions,
|
||||||
|
createTransaction,
|
||||||
|
createAutoRefillTransaction,
|
||||||
|
createStructuredTransaction,
|
||||||
|
};
|
||||||
|
|||||||
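The refactor above turns Mongoose statics into plain module-level functions. A minimal call-site sketch, assuming the txData fields used elsewhere in this diff; the ids and token count are placeholders:

```js
const { createTransaction } = require('./Transaction');

async function chargeCompletion(userId) {
  // Before this refactor: const { Transaction } = require('./Transaction');
  //                        await Transaction.create(txData);
  return await createTransaction({
    user: userId, // assumed user ObjectId
    conversationId: 'test-convo-1', // hypothetical conversation id
    model: 'gpt-4',
    tokenType: 'completion',
    rawAmount: -150, // negative token count, matching the spendTokens convention
  });
}
```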
@@ -3,14 +3,13 @@ const { MongoMemoryServer } = require('mongodb-memory-server');
 const { spendTokens, spendStructuredTokens } = require('./spendTokens');
 const { getBalanceConfig } = require('~/server/services/Config');
 const { getMultiplier, getCacheMultiplier } = require('./tx');
-const { Transaction } = require('./Transaction');
-const Balance = require('./Balance');
+const { createTransaction } = require('./Transaction');
+const { Balance } = require('~/db/models');
 
 // Mock the custom config module so we can control the balance flag.
 jest.mock('~/server/services/Config');
 
 let mongoServer;
 
 beforeAll(async () => {
   mongoServer = await MongoMemoryServer.create();
   const mongoUri = mongoServer.getUri();
@@ -368,7 +367,7 @@ describe('NaN Handling Tests', () => {
   };
 
   // Act
-  const result = await Transaction.create(txData);
+  const result = await createTransaction(txData);
 
   // Assert: No transaction should be created and balance remains unchanged.
   expect(result).toBeUndefined();
@@ -1,6 +0,0 @@
-const mongoose = require('mongoose');
-const { userSchema } = require('@librechat/data-schemas');
-
-const User = mongoose.model('User', userSchema);
-
-module.exports = User;
@@ -1,9 +1,9 @@
+const { logger } = require('@librechat/data-schemas');
 const { ViolationTypes } = require('librechat-data-provider');
-const { Transaction } = require('./Transaction');
+const { createAutoRefillTransaction } = require('./Transaction');
 const { logViolation } = require('~/cache');
 const { getMultiplier } = require('./tx');
-const { logger } = require('~/config');
-const Balance = require('./Balance');
+const { Balance } = require('~/db/models');
 
 function isInvalidDate(date) {
   return isNaN(date);
@@ -60,7 +60,7 @@ const checkBalanceRecord = async function ({
 ) {
   try {
     /** @type {{ rate: number, user: string, balance: number, transaction: import('@librechat/data-schemas').ITransaction}} */
-    const result = await Transaction.createAutoRefillTransaction({
+    const result = await createAutoRefillTransaction({
       user: user,
       tokenType: 'credits',
       context: 'autoRefill',
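A sketch of the same call made by checkBalanceRecord above; the field names come from the hunk, while the rawAmount value is an assumption for illustration:

```js
const { createAutoRefillTransaction } = require('./Transaction');

async function refill(userId) {
  // Returns { rate, user, balance, transaction } per the JSDoc type in the hunk.
  return await createAutoRefillTransaction({
    user: userId,
    tokenType: 'credits',
    context: 'autoRefill',
    rawAmount: 10000, // assumption: positive credit grant
  });
}
```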
@@ -1,6 +1,7 @@
 const mongoose = require('mongoose');
 const { MongoMemoryServer } = require('mongodb-memory-server');
-const { Message, getMessages, bulkSaveMessages } = require('./Message');
+const { getMessages, bulkSaveMessages } = require('./Message');
+const { Message } = require('~/db/models');
 
 // Original version of buildTree function
 function buildTree({ messages, fileMap }) {
@@ -42,7 +43,6 @@ function buildTree({ messages, fileMap }) {
 }
 
 let mongod;
 
 beforeAll(async () => {
   mongod = await MongoMemoryServer.create();
   const uri = mongod.getUri();
@@ -1,13 +1,7 @@
-const {
-  comparePassword,
-  deleteUserById,
-  generateToken,
-  getUserById,
-  updateUser,
-  createUser,
-  countUsers,
-  findUser,
-} = require('./userMethods');
+const mongoose = require('mongoose');
+const { createMethods } = require('@librechat/data-schemas');
+const methods = createMethods(mongoose);
+const { comparePassword } = require('./userMethods');
 const {
   findFileById,
   createFile,
@@ -26,32 +20,12 @@ const {
   deleteMessagesSince,
   deleteMessages,
 } = require('./Message');
-const {
-  createSession,
-  findSession,
-  updateExpiration,
-  deleteSession,
-  deleteAllUserSessions,
-  generateRefreshToken,
-  countActiveSessions,
-} = require('./Session');
 const { getConvoTitle, getConvo, saveConvo, deleteConvos } = require('./Conversation');
 const { getPreset, getPresets, savePreset, deletePresets } = require('./Preset');
-const { createToken, findToken, updateToken, deleteTokens } = require('./Token');
-const Balance = require('./Balance');
-const User = require('./User');
-const Key = require('./Key');
 
 module.exports = {
+  ...methods,
   comparePassword,
-  deleteUserById,
-  generateToken,
-  getUserById,
-  updateUser,
-  createUser,
-  countUsers,
-  findUser,
 
   findFileById,
   createFile,
   updateFile,
@@ -77,21 +51,4 @@ module.exports = {
   getPresets,
   savePreset,
   deletePresets,
-
-  createToken,
-  findToken,
-  updateToken,
-  deleteTokens,
-
-  createSession,
-  findSession,
-  updateExpiration,
-  deleteSession,
-  deleteAllUserSessions,
-  generateRefreshToken,
-  countActiveSessions,
-
-  User,
-  Key,
-  Balance,
 };
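The consolidation is intended to be transparent to callers: `createMethods(mongoose)` generates the user/session/token helpers that previously lived in separate files, and spreading the result into `module.exports` preserves the import surface. A sketch of the unchanged consumer side, assuming `findUser` keeps the `(searchCriteria, fieldsToSelect)` signature shown later in this diff:

```js
// Consumers keep importing the same names from '~/models';
// these now resolve to methods produced by createMethods(mongoose).
const { findUser, createUser, findSession, createToken } = require('~/models');

async function lookupByEmail(email) {
  return await findUser({ email }, 'email _id');
}
```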
@@ -1,7 +1,7 @@
 const mongoose = require('mongoose');
-const { getRandomValues, hashToken } = require('~/server/utils/crypto');
-const { createToken, findToken } = require('./Token');
-const logger = require('~/config/winston');
+const { getRandomValues } = require('@librechat/api');
+const { logger, hashToken } = require('@librechat/data-schemas');
+const { createToken, findToken } = require('~/models');
 
 /**
  * @module inviteUser
@@ -1,475 +0,0 @@
-const _ = require('lodash');
-const mongoose = require('mongoose');
-const { MeiliSearch } = require('meilisearch');
-const { parseTextParts, ContentTypes } = require('librechat-data-provider');
-const { cleanUpPrimaryKeyValue } = require('~/lib/utils/misc');
-const logger = require('~/config/meiliLogger');
-
-// Environment flags
-/**
- * Flag to indicate if search is enabled based on environment variables.
- * @type {boolean}
- */
-const searchEnabled = process.env.SEARCH && process.env.SEARCH.toLowerCase() === 'true';
-
-/**
- * Flag to indicate if MeiliSearch is enabled based on required environment variables.
- * @type {boolean}
- */
-const meiliEnabled = process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY && searchEnabled;
-
-/**
- * Validates the required options for configuring the mongoMeili plugin.
- *
- * @param {Object} options - The configuration options.
- * @param {string} options.host - The MeiliSearch host.
- * @param {string} options.apiKey - The MeiliSearch API key.
- * @param {string} options.indexName - The name of the index.
- * @throws {Error} Throws an error if any required option is missing.
- */
-const validateOptions = function (options) {
-  const requiredKeys = ['host', 'apiKey', 'indexName'];
-  requiredKeys.forEach((key) => {
-    if (!options[key]) {
-      throw new Error(`Missing mongoMeili Option: ${key}`);
-    }
-  });
-};
-
-/**
- * Factory function to create a MeiliMongooseModel class which extends a Mongoose model.
- * This class contains static and instance methods to synchronize and manage the MeiliSearch index
- * corresponding to the MongoDB collection.
- *
- * @param {Object} config - Configuration object.
- * @param {Object} config.index - The MeiliSearch index object.
- * @param {Array<string>} config.attributesToIndex - List of attributes to index.
- * @returns {Function} A class definition that will be loaded into the Mongoose schema.
- */
-const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
-  // The primary key is assumed to be the first attribute in the attributesToIndex array.
-  const primaryKey = attributesToIndex[0];
-
-  class MeiliMongooseModel {
-    /**
-     * Synchronizes the data between the MongoDB collection and the MeiliSearch index.
-     *
-     * The synchronization process involves:
-     * 1. Fetching all documents from the MongoDB collection and MeiliSearch index.
-     * 2. Comparing documents from both sources.
-     * 3. Deleting documents from MeiliSearch that no longer exist in MongoDB.
-     * 4. Adding documents to MeiliSearch that exist in MongoDB but not in the index.
-     * 5. Updating documents in MeiliSearch if key fields (such as `text` or `title`) differ.
-     * 6. Updating the `_meiliIndex` field in MongoDB to indicate the indexing status.
-     *
-     * Note: The function processes documents in batches because MeiliSearch's
-     * `index.getDocuments` requires an exact limit and `index.addDocuments` does not handle
-     * partial failures in a batch.
-     *
-     * @returns {Promise<void>} Resolves when the synchronization is complete.
-     */
-    static async syncWithMeili() {
-      try {
-        let moreDocuments = true;
-        // Retrieve all MongoDB documents from the collection as plain JavaScript objects.
-        const mongoDocuments = await this.find().lean();
-
-        // Helper function to format a document by selecting only the attributes to index
-        // and omitting keys starting with '$'.
-        const format = (doc) =>
-          _.omitBy(_.pick(doc, attributesToIndex), (v, k) => k.startsWith('$'));
-
-        // Build a map of MongoDB documents for quick lookup based on the primary key.
-        const mongoMap = new Map(mongoDocuments.map((doc) => [doc[primaryKey], format(doc)]));
-        const indexMap = new Map();
-        let offset = 0;
-        const batchSize = 1000;
-
-        // Fetch documents from the MeiliSearch index in batches.
-        while (moreDocuments) {
-          const batch = await index.getDocuments({ limit: batchSize, offset });
-          if (batch.results.length === 0) {
-            moreDocuments = false;
-          }
-          for (const doc of batch.results) {
-            indexMap.set(doc[primaryKey], format(doc));
-          }
-          offset += batchSize;
-        }
-
-        logger.debug('[syncWithMeili]', { indexMap: indexMap.size, mongoMap: mongoMap.size });
-
-        const updateOps = [];
-
-        // Process documents present in the MeiliSearch index.
-        for (const [id, doc] of indexMap) {
-          const update = {};
-          update[primaryKey] = id;
-          if (mongoMap.has(id)) {
-            // If document exists in MongoDB, check for discrepancies in key fields.
-            if (
-              (doc.text && doc.text !== mongoMap.get(id).text) ||
-              (doc.title && doc.title !== mongoMap.get(id).title)
-            ) {
-              logger.debug(
-                `[syncWithMeili] ${id} had document discrepancy in ${
-                  doc.text ? 'text' : 'title'
-                } field`,
-              );
-              updateOps.push({
-                updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
-              });
-              await index.addDocuments([doc]);
-            }
-          } else {
-            // If the document does not exist in MongoDB, delete it from MeiliSearch.
-            await index.deleteDocument(id);
-            updateOps.push({
-              updateOne: { filter: update, update: { $set: { _meiliIndex: false } } },
-            });
-          }
-        }
-
-        // Process documents present in MongoDB.
-        for (const [id, doc] of mongoMap) {
-          const update = {};
-          update[primaryKey] = id;
-          // If the document is missing in the Meili index, add it.
-          if (!indexMap.has(id)) {
-            await index.addDocuments([doc]);
-            updateOps.push({
-              updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
-            });
-          } else if (doc._meiliIndex === false) {
-            // If the document exists but is marked as not indexed, update the flag.
-            updateOps.push({
-              updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
-            });
-          }
-        }
-
-        // Execute bulk update operations in MongoDB to update the _meiliIndex flags.
-        if (updateOps.length > 0) {
-          await this.collection.bulkWrite(updateOps);
-          logger.debug(
-            `[syncWithMeili] Finished indexing ${
-              primaryKey === 'messageId' ? 'messages' : 'conversations'
-            }`,
-          );
-        }
-      } catch (error) {
-        logger.error('[syncWithMeili] Error adding document to Meili', error);
-      }
-    }
-
-    /**
-     * Updates settings for the MeiliSearch index.
-     *
-     * @param {Object} settings - The settings to update on the MeiliSearch index.
-     * @returns {Promise<Object>} Promise resolving to the update result.
-     */
-    static async setMeiliIndexSettings(settings) {
-      return await index.updateSettings(settings);
-    }
-
-    /**
-     * Searches the MeiliSearch index and optionally populates the results with data from MongoDB.
-     *
-     * @param {string} q - The search query.
-     * @param {Object} params - Additional search parameters for MeiliSearch.
-     * @param {boolean} populate - Whether to populate search hits with full MongoDB documents.
-     * @returns {Promise<Object>} The search results with populated hits if requested.
-     */
-    static async meiliSearch(q, params, populate) {
-      const data = await index.search(q, params);
-
-      if (populate) {
-        // Build a query using the primary key values from the search hits.
-        const query = {};
-        query[primaryKey] = _.map(data.hits, (hit) => cleanUpPrimaryKeyValue(hit[primaryKey]));
-
-        // Build a projection object, including only keys that do not start with '$'.
-        const projection = Object.keys(this.schema.obj).reduce(
-          (results, key) => {
-            if (!key.startsWith('$')) {
-              results[key] = 1;
-            }
-            return results;
-          },
-          { _id: 1, __v: 1 },
-        );
-
-        // Retrieve the full documents from MongoDB.
-        const hitsFromMongoose = await this.find(query, projection).lean();
-
-        // Merge the MongoDB documents with the search hits.
-        const populatedHits = data.hits.map(function (hit) {
-          const query = {};
-          query[primaryKey] = hit[primaryKey];
-          const originalHit = _.find(hitsFromMongoose, query);
-
-          return {
-            ...(originalHit ?? {}),
-            ...hit,
-          };
-        });
-        data.hits = populatedHits;
-      }
-
-      return data;
-    }
-
-    /**
-     * Preprocesses the current document for indexing.
-     *
-     * This method:
-     * - Picks only the defined attributes to index.
-     * - Omits any keys starting with '$'.
-     * - Replaces pipe characters ('|') in `conversationId` with '--'.
-     * - Extracts and concatenates text from an array of content items.
-     *
-     * @returns {Object} The preprocessed object ready for indexing.
-     */
-    preprocessObjectForIndex() {
-      const object = _.omitBy(_.pick(this.toJSON(), attributesToIndex), (v, k) =>
-        k.startsWith('$'),
-      );
-      if (object.conversationId && object.conversationId.includes('|')) {
-        object.conversationId = object.conversationId.replace(/\|/g, '--');
-      }
-
-      if (object.content && Array.isArray(object.content)) {
-        object.text = parseTextParts(object.content);
-        delete object.content;
-      }
-
-      return object;
-    }
-
-    /**
-     * Adds the current document to the MeiliSearch index.
-     *
-     * The method preprocesses the document, adds it to MeiliSearch, and then updates
-     * the MongoDB document's `_meiliIndex` flag to true.
-     *
-     * @returns {Promise<void>}
-     */
-    async addObjectToMeili() {
-      const object = this.preprocessObjectForIndex();
-      try {
-        await index.addDocuments([object]);
-      } catch (error) {
-        // Error handling can be enhanced as needed.
-        logger.error('[addObjectToMeili] Error adding document to Meili', error);
-      }
-
-      await this.collection.updateMany({ _id: this._id }, { $set: { _meiliIndex: true } });
-    }
-
-    /**
-     * Updates the current document in the MeiliSearch index.
-     *
-     * @returns {Promise<void>}
-     */
-    async updateObjectToMeili() {
-      const object = _.omitBy(_.pick(this.toJSON(), attributesToIndex), (v, k) =>
-        k.startsWith('$'),
-      );
-      await index.updateDocuments([object]);
-    }
-
-    /**
-     * Deletes the current document from the MeiliSearch index.
-     *
-     * @returns {Promise<void>}
-     */
-    async deleteObjectFromMeili() {
-      await index.deleteDocument(this._id);
-    }
-
-    /**
-     * Post-save hook to synchronize the document with MeiliSearch.
-     *
-     * If the document is already indexed (i.e. `_meiliIndex` is true), it updates it;
-     * otherwise, it adds the document to the index.
-     */
-    postSaveHook() {
-      if (this._meiliIndex) {
-        this.updateObjectToMeili();
-      } else {
-        this.addObjectToMeili();
-      }
-    }
-
-    /**
-     * Post-update hook to update the document in MeiliSearch.
-     *
-     * This hook is triggered after a document update, ensuring that changes are
-     * propagated to the MeiliSearch index if the document is indexed.
-     */
-    postUpdateHook() {
-      if (this._meiliIndex) {
-        this.updateObjectToMeili();
-      }
-    }
-
-    /**
-     * Post-remove hook to delete the document from MeiliSearch.
-     *
-     * This hook is triggered after a document is removed, ensuring that the document
-     * is also removed from the MeiliSearch index if it was previously indexed.
-     */
-    postRemoveHook() {
-      if (this._meiliIndex) {
-        this.deleteObjectFromMeili();
-      }
-    }
-  }
-
-  return MeiliMongooseModel;
-};
-
-/**
- * Mongoose plugin to synchronize MongoDB collections with a MeiliSearch index.
- *
- * This plugin:
- * - Validates the provided options.
- * - Adds a `_meiliIndex` field to the schema to track indexing status.
- * - Sets up a MeiliSearch client and creates an index if it doesn't already exist.
- * - Loads class methods for syncing, searching, and managing documents in MeiliSearch.
- * - Registers Mongoose hooks (post-save, post-update, post-remove, etc.) to maintain index consistency.
- *
- * @param {mongoose.Schema} schema - The Mongoose schema to which the plugin is applied.
- * @param {Object} options - Configuration options.
- * @param {string} options.host - The MeiliSearch host.
- * @param {string} options.apiKey - The MeiliSearch API key.
- * @param {string} options.indexName - The name of the MeiliSearch index.
- * @param {string} options.primaryKey - The primary key field for indexing.
- */
-module.exports = function mongoMeili(schema, options) {
-  validateOptions(options);
-
-  // Add _meiliIndex field to the schema to track if a document has been indexed in MeiliSearch.
-  schema.add({
-    _meiliIndex: {
-      type: Boolean,
-      required: false,
-      select: false,
-      default: false,
-    },
-  });
-
-  const { host, apiKey, indexName, primaryKey } = options;
-
-  // Setup the MeiliSearch client.
-  const client = new MeiliSearch({ host, apiKey });
-
-  // Create the index asynchronously if it doesn't exist.
-  client.createIndex(indexName, { primaryKey });
-
-  // Setup the MeiliSearch index for this schema.
-  const index = client.index(indexName);
-
-  // Collect attributes from the schema that should be indexed.
-  const attributesToIndex = [
-    ..._.reduce(
-      schema.obj,
-      function (results, value, key) {
-        return value.meiliIndex ? [...results, key] : results;
-      },
-      [],
-    ),
-  ];
-
-  // Load the class methods into the schema.
-  schema.loadClass(createMeiliMongooseModel({ index, indexName, client, attributesToIndex }));
-
-  // Register Mongoose hooks to synchronize with MeiliSearch.
-
-  // Post-save: synchronize after a document is saved.
-  schema.post('save', function (doc) {
-    doc.postSaveHook();
-  });
-
-  // Post-update: synchronize after a document is updated.
-  schema.post('update', function (doc) {
-    doc.postUpdateHook();
-  });
-
-  // Post-remove: synchronize after a document is removed.
-  schema.post('remove', function (doc) {
-    doc.postRemoveHook();
-  });
-
-  // Pre-deleteMany hook: remove corresponding documents from MeiliSearch when multiple documents are deleted.
-  schema.pre('deleteMany', async function (next) {
-    if (!meiliEnabled) {
-      return next();
-    }
-
-    try {
-      // Check if the schema has a "messages" field to determine if it's a conversation schema.
-      if (Object.prototype.hasOwnProperty.call(schema.obj, 'messages')) {
-        const convoIndex = client.index('convos');
-        const deletedConvos = await mongoose.model('Conversation').find(this._conditions).lean();
-        const promises = deletedConvos.map((convo) =>
-          convoIndex.deleteDocument(convo.conversationId),
-        );
-        await Promise.all(promises);
-      }
-
-      // Check if the schema has a "messageId" field to determine if it's a message schema.
-      if (Object.prototype.hasOwnProperty.call(schema.obj, 'messageId')) {
-        const messageIndex = client.index('messages');
-        const deletedMessages = await mongoose.model('Message').find(this._conditions).lean();
-        const promises = deletedMessages.map((message) =>
-          messageIndex.deleteDocument(message.messageId),
-        );
-        await Promise.all(promises);
-      }
-      return next();
-    } catch (error) {
-      if (meiliEnabled) {
-        logger.error(
-          '[MeiliMongooseModel.deleteMany] There was an issue deleting conversation indexes upon deletion. Next startup may be slow due to syncing.',
-          error,
-        );
-      }
-      return next();
-    }
-  });
-
-  // Post-findOneAndUpdate hook: update MeiliSearch index after a document is updated via findOneAndUpdate.
-  schema.post('findOneAndUpdate', async function (doc) {
-    if (!meiliEnabled) {
-      return;
-    }
-
-    // If the document is unfinished, do not update the index.
-    if (doc.unfinished) {
-      return;
-    }
-
-    let meiliDoc;
-    // For conversation documents, try to fetch the document from the "convos" index.
-    if (doc.messages) {
-      try {
-        meiliDoc = await client.index('convos').getDocument(doc.conversationId);
-      } catch (error) {
-        logger.debug(
-          '[MeiliMongooseModel.findOneAndUpdate] Convo not found in MeiliSearch and will index ' +
-            doc.conversationId,
-          error,
-        );
-      }
-    }
-
-    // If the MeiliSearch document exists and the title is unchanged, do nothing.
-    if (meiliDoc && meiliDoc.title === doc.title) {
-      return;
-    }
-
-    // Otherwise, trigger a post-save hook to synchronize the document.
-    doc.postSaveHook();
-  });
-};
@@ -1,18 +0,0 @@
-const mongoose = require('mongoose');
-const mongoMeili = require('../plugins/mongoMeili');
-
-const { convoSchema } = require('@librechat/data-schemas');
-
-if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
-  convoSchema.plugin(mongoMeili, {
-    host: process.env.MEILI_HOST,
-    apiKey: process.env.MEILI_MASTER_KEY,
-    /** Note: Will get created automatically if it doesn't exist already */
-    indexName: 'convos',
-    primaryKey: 'conversationId',
-  });
-}
-
-const Conversation = mongoose.models.Conversation || mongoose.model('Conversation', convoSchema);
-
-module.exports = Conversation;
@@ -1,16 +0,0 @@
-const mongoose = require('mongoose');
-const mongoMeili = require('~/models/plugins/mongoMeili');
-const { messageSchema } = require('@librechat/data-schemas');
-
-if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
-  messageSchema.plugin(mongoMeili, {
-    host: process.env.MEILI_HOST,
-    apiKey: process.env.MEILI_MASTER_KEY,
-    indexName: 'messages',
-    primaryKey: 'messageId',
-  });
-}
-
-const Message = mongoose.models.Message || mongoose.model('Message', messageSchema);
-
-module.exports = Message;
@@ -1,6 +0,0 @@
-const mongoose = require('mongoose');
-const { pluginAuthSchema } = require('@librechat/data-schemas');
-
-const PluginAuth = mongoose.models.Plugin || mongoose.model('PluginAuth', pluginAuthSchema);
-
-module.exports = PluginAuth;
@@ -1,6 +0,0 @@
-const mongoose = require('mongoose');
-const { presetSchema } = require('@librechat/data-schemas');
-
-const Preset = mongoose.models.Preset || mongoose.model('Preset', presetSchema);
-
-module.exports = Preset;
@@ -1,6 +1,5 @@
-const { Transaction } = require('./Transaction');
 const { logger } = require('~/config');
+const { createTransaction, createStructuredTransaction } = require('./Transaction');
 /**
  * Creates up to two transactions to record the spending of tokens.
  *
@@ -33,7 +32,7 @@ const spendTokens = async (txData, tokenUsage) => {
   let prompt, completion;
   try {
     if (promptTokens !== undefined) {
-      prompt = await Transaction.create({
+      prompt = await createTransaction({
         ...txData,
         tokenType: 'prompt',
         rawAmount: promptTokens === 0 ? 0 : -Math.max(promptTokens, 0),
@@ -41,7 +40,7 @@ const spendTokens = async (txData, tokenUsage) => {
     }
 
     if (completionTokens !== undefined) {
-      completion = await Transaction.create({
+      completion = await createTransaction({
        ...txData,
        tokenType: 'completion',
        rawAmount: completionTokens === 0 ? 0 : -Math.max(completionTokens, 0),
@@ -101,7 +100,7 @@ const spendStructuredTokens = async (txData, tokenUsage) => {
   try {
     if (promptTokens) {
       const { input = 0, write = 0, read = 0 } = promptTokens;
-      prompt = await Transaction.createStructured({
+      prompt = await createStructuredTransaction({
        ...txData,
        tokenType: 'prompt',
        inputTokens: -input,
@@ -111,7 +110,7 @@ const spendStructuredTokens = async (txData, tokenUsage) => {
     }
 
     if (completionTokens) {
-      completion = await Transaction.create({
+      completion = await createTransaction({
        ...txData,
        tokenType: 'completion',
        rawAmount: -completionTokens,
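Both entry points keep their `(txData, tokenUsage)` signatures through the refactor. A minimal usage sketch, with the token counts as illustrative values; the structured shape mirrors the `{ input, write, read }` destructuring in the hunk above:

```js
const { spendTokens, spendStructuredTokens } = require('./spendTokens');

async function recordUsage(txData) {
  // Plain usage: one transaction per defined token count.
  await spendTokens(txData, { promptTokens: 100, completionTokens: 50 });

  // Structured usage (e.g. prompt caching): input/write/read breakdown.
  await spendStructuredTokens(txData, {
    promptTokens: { input: 60, write: 30, read: 10 }, // illustrative counts
    completionTokens: 50,
  });
}
```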
@@ -1,8 +1,9 @@
 const mongoose = require('mongoose');
 const { MongoMemoryServer } = require('mongodb-memory-server');
-const { Transaction } = require('./Transaction');
-const Balance = require('./Balance');
 const { spendTokens, spendStructuredTokens } = require('./spendTokens');
+const { createTransaction, createAutoRefillTransaction } = require('./Transaction');
 
+require('~/db/models');
+
 // Mock the logger to prevent console output during tests
 jest.mock('~/config', () => ({
@@ -19,11 +20,15 @@ jest.mock('~/server/services/Config');
 describe('spendTokens', () => {
   let mongoServer;
   let userId;
+  let Transaction;
+  let Balance;
 
   beforeAll(async () => {
     mongoServer = await MongoMemoryServer.create();
-    const mongoUri = mongoServer.getUri();
-    await mongoose.connect(mongoUri);
+    await mongoose.connect(mongoServer.getUri());
+
+    Transaction = mongoose.model('Transaction');
+    Balance = mongoose.model('Balance');
   });
 
   afterAll(async () => {
@@ -197,7 +202,7 @@ describe('spendTokens', () => {
     // Check that the transaction records show the adjusted values
     const transactionResults = await Promise.all(
       transactions.map((t) =>
-        Transaction.create({
+        createTransaction({
          ...txData,
          tokenType: t.tokenType,
          rawAmount: t.rawAmount,
@@ -280,7 +285,7 @@ describe('spendTokens', () => {
 
     // Check the return values from Transaction.create directly
     // This is to verify that the incrementValue is not becoming positive
-    const directResult = await Transaction.create({
+    const directResult = await createTransaction({
      user: userId,
      conversationId: 'test-convo-3',
      model: 'gpt-4',
@@ -607,7 +612,7 @@ describe('spendTokens', () => {
     const promises = [];
     for (let i = 0; i < numberOfRefills; i++) {
       promises.push(
-        Transaction.createAutoRefillTransaction({
+        createAutoRefillTransaction({
          user: userId,
          tokenType: 'credits',
          context: 'concurrent-refill-test',
@@ -78,7 +78,7 @@ const tokenValues = Object.assign(
   'gpt-3.5-turbo-1106': { prompt: 1, completion: 2 },
   'o4-mini': { prompt: 1.1, completion: 4.4 },
   'o3-mini': { prompt: 1.1, completion: 4.4 },
-  o3: { prompt: 10, completion: 40 },
+  o3: { prompt: 2, completion: 8 },
   'o1-mini': { prompt: 1.1, completion: 4.4 },
   'o1-preview': { prompt: 15, completion: 60 },
   o1: { prompt: 15, completion: 60 },
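The o3 row is the only value change in this hunk. Following the token-value logic shown earlier in this diff (tokenValue = -|rawAmount| × multiplier), a quick sketch of the effect; the 1,000-token figure is illustrative:

```js
// Illustrative arithmetic only; the multipliers come from the tokenValues table above.
const completionTokens = 1000;
const before = -Math.abs(completionTokens) * 40; // -40000 token credits debited
const after = -Math.abs(completionTokens) * 8; //  -8000 token credits debited
```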
@@ -1,159 +1,4 @@
 const bcrypt = require('bcryptjs');
-const { getBalanceConfig } = require('~/server/services/Config');
-const signPayload = require('~/server/services/signPayload');
-const Balance = require('./Balance');
-const User = require('./User');
-
-/**
- * Retrieve a user by ID and convert the found user document to a plain object.
- *
- * @param {string} userId - The ID of the user to find and return as a plain object.
- * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
- * @returns {Promise<MongoUser>} A plain object representing the user document, or `null` if no user is found.
- */
-const getUserById = async function (userId, fieldsToSelect = null) {
-  const query = User.findById(userId);
-  if (fieldsToSelect) {
-    query.select(fieldsToSelect);
-  }
-  return await query.lean();
-};
-
-/**
- * Search for a single user based on partial data and return matching user document as plain object.
- * @param {Partial<MongoUser>} searchCriteria - The partial data to use for searching the user.
- * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
- * @returns {Promise<MongoUser>} A plain object representing the user document, or `null` if no user is found.
- */
-const findUser = async function (searchCriteria, fieldsToSelect = null) {
-  const query = User.findOne(searchCriteria);
-  if (fieldsToSelect) {
-    query.select(fieldsToSelect);
-  }
-  return await query.lean();
-};
-
-/**
- * Update a user with new data without overwriting existing properties.
- *
- * @param {string} userId - The ID of the user to update.
- * @param {Object} updateData - An object containing the properties to update.
- * @returns {Promise<MongoUser>} The updated user document as a plain object, or `null` if no user is found.
- */
-const updateUser = async function (userId, updateData) {
-  const updateOperation = {
-    $set: updateData,
-    $unset: { expiresAt: '' }, // Remove the expiresAt field to prevent TTL
-  };
-  return await User.findByIdAndUpdate(userId, updateOperation, {
-    new: true,
-    runValidators: true,
-  }).lean();
-};
-
-/**
- * Creates a new user, optionally with a TTL of 1 week.
- * @param {MongoUser} data - The user data to be created, must contain user_id.
- * @param {boolean} [disableTTL=true] - Whether to disable the TTL. Defaults to `true`.
- * @param {boolean} [returnUser=false] - Whether to return the created user object.
- * @returns {Promise<ObjectId|MongoUser>} A promise that resolves to the created user document ID or user object.
- * @throws {Error} If a user with the same user_id already exists.
- */
-const createUser = async (data, disableTTL = true, returnUser = false) => {
-  const balance = await getBalanceConfig();
-  const userData = {
-    ...data,
-    expiresAt: disableTTL ? null : new Date(Date.now() + 604800 * 1000), // 1 week in milliseconds
-  };
-
-  if (disableTTL) {
-    delete userData.expiresAt;
-  }
-
-  const user = await User.create(userData);
-
-  // If balance is enabled, create or update a balance record for the user using global.interfaceConfig.balance
-  if (balance?.enabled && balance?.startBalance) {
-    const update = {
-      $inc: { tokenCredits: balance.startBalance },
-    };
-
-    if (
-      balance.autoRefillEnabled &&
-      balance.refillIntervalValue != null &&
-      balance.refillIntervalUnit != null &&
-      balance.refillAmount != null
-    ) {
-      update.$set = {
-        autoRefillEnabled: true,
-        refillIntervalValue: balance.refillIntervalValue,
-        refillIntervalUnit: balance.refillIntervalUnit,
-        refillAmount: balance.refillAmount,
-      };
-    }
-
-    await Balance.findOneAndUpdate({ user: user._id }, update, { upsert: true, new: true }).lean();
-  }
-
-  if (returnUser) {
-    return user.toObject();
-  }
-  return user._id;
-};
-
-/**
- * Count the number of user documents in the collection based on the provided filter.
- *
- * @param {Object} [filter={}] - The filter to apply when counting the documents.
- * @returns {Promise<number>} The count of documents that match the filter.
- */
-const countUsers = async function (filter = {}) {
-  return await User.countDocuments(filter);
-};
-
-/**
- * Delete a user by their unique ID.
- *
- * @param {string} userId - The ID of the user to delete.
- * @returns {Promise<{ deletedCount: number }>} An object indicating the number of deleted documents.
- */
-const deleteUserById = async function (userId) {
-  try {
-    const result = await User.deleteOne({ _id: userId });
-    if (result.deletedCount === 0) {
-      return { deletedCount: 0, message: 'No user found with that ID.' };
-    }
-    return { deletedCount: result.deletedCount, message: 'User was deleted successfully.' };
-  } catch (error) {
-    throw new Error('Error deleting user: ' + error.message);
-  }
-};
-
-const { SESSION_EXPIRY } = process.env ?? {};
-const expires = eval(SESSION_EXPIRY) ?? 1000 * 60 * 15;
-
-/**
- * Generates a JWT token for a given user.
- *
- * @param {MongoUser} user - The user for whom the token is being generated.
- * @returns {Promise<string>} A promise that resolves to a JWT token.
- */
-const generateToken = async (user) => {
-  if (!user) {
-    throw new Error('No user provided');
-  }
-
-  return await signPayload({
-    payload: {
-      id: user._id,
-      username: user.username,
-      provider: user.provider,
-      email: user.email,
-    },
-    secret: process.env.JWT_SECRET,
-    expirationTime: expires / 1000,
-  });
-};
-
 /**
  * Compares the provided password with the user's password.
@@ -167,6 +12,10 @@ const comparePassword = async (user, candidatePassword) => {
     throw new Error('No user provided');
   }
 
+  if (!user.password) {
+    throw new Error('No password, likely an email first registered via Social/OIDC login');
+  }
+
   return new Promise((resolve, reject) => {
     bcrypt.compare(candidatePassword, user.password, (err, isMatch) => {
       if (err) {
@@ -179,11 +28,4 @@ const comparePassword = async (user, candidatePassword) => {
 
 module.exports = {
   comparePassword,
-  deleteUserById,
-  generateToken,
-  getUserById,
-  countUsers,
-  createUser,
-  updateUser,
-  findUser,
 };
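The new guard makes the missing-password case explicit instead of letting bcrypt fail on an undefined hash. A caller-side sketch, assuming the error message from the hunk; how a real caller maps this to a login failure is an assumption:

```js
const { comparePassword } = require('~/models');

async function verifyLocalLogin(user, candidatePassword) {
  try {
    return await comparePassword(user, candidatePassword);
  } catch (err) {
    // Users first registered via Social/OIDC login have no local password.
    if (/No password/.test(err.message)) {
      return false; // assumption: treat as a failed local-password login
    }
    throw err;
  }
}
```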
@@ -34,23 +34,25 @@
   },
   "homepage": "https://librechat.ai",
   "dependencies": {
-    "@anthropic-ai/sdk": "^0.37.0",
+    "@anthropic-ai/sdk": "^0.52.0",
     "@aws-sdk/client-s3": "^3.758.0",
     "@aws-sdk/s3-request-presigner": "^3.758.0",
     "@azure/identity": "^4.7.0",
     "@azure/search-documents": "^12.0.0",
     "@azure/storage-blob": "^12.27.0",
-    "@google/generative-ai": "^0.23.0",
+    "@google/generative-ai": "^0.24.0",
     "@googleapis/youtube": "^20.0.0",
     "@keyv/redis": "^4.3.3",
-    "@langchain/community": "^0.3.44",
-    "@langchain/core": "^0.3.57",
-    "@langchain/google-genai": "^0.2.9",
-    "@langchain/google-vertexai": "^0.2.9",
+    "@langchain/community": "^0.3.47",
+    "@langchain/core": "^0.3.60",
+    "@langchain/google-genai": "^0.2.13",
+    "@langchain/google-vertexai": "^0.2.13",
     "@langchain/textsplitters": "^0.1.0",
-    "@librechat/agents": "^2.4.37",
+    "@librechat/agents": "^2.4.41",
+    "@librechat/api": "*",
     "@librechat/data-schemas": "*",
     "@node-saml/passport-saml": "^5.0.0",
+    "@microsoft/microsoft-graph-client": "^3.0.7",
     "@waylaidwanderer/fetch-event-source": "^3.0.1",
     "axios": "^1.8.2",
     "bcryptjs": "^2.4.3",
@@ -81,15 +83,15 @@
     "keyv-file": "^5.1.2",
     "klona": "^2.0.6",
     "librechat-data-provider": "*",
-    "librechat-mcp": "*",
     "lodash": "^4.17.21",
     "meilisearch": "^0.38.0",
     "memorystore": "^1.6.7",
     "mime": "^3.0.0",
     "module-alias": "^2.2.3",
     "mongoose": "^8.12.1",
-    "multer": "^2.0.0",
+    "multer": "^2.0.1",
     "nanoid": "^3.3.7",
+    "node-fetch": "^2.7.0",
     "nodemailer": "^6.9.15",
     "ollama": "^0.5.0",
     "openai": "^4.96.2",
@@ -109,8 +111,9 @@
     "tiktoken": "^1.0.15",
     "traverse": "^0.6.7",
     "ua-parser-js": "^1.0.36",
+    "undici": "^7.10.0",
     "winston": "^3.11.0",
-    "winston-daily-rotate-file": "^4.7.1",
+    "winston-daily-rotate-file": "^5.0.0",
     "youtube-transcript": "^1.2.1",
     "zod": "^3.22.4"
   },
@@ -220,6 +220,9 @@ function disposeClient(client) {
   if (client.maxResponseTokens) {
     client.maxResponseTokens = null;
   }
+  if (client.processMemory) {
+    client.processMemory = null;
+  }
   if (client.run) {
     // Break circular references in run
     if (client.run.Graph) {
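A minimal sketch of the dispose pattern this hunk extends: large or circular references are nulled out one by one so the client object can be garbage-collected. Only the `processMemory` and `run` checks appear in the diff; the rest of `disposeClient` is assumed:

```js
// Sketch only; the real disposeClient handles many more fields.
function disposeClientSketch(client) {
  if (client.processMemory) {
    client.processMemory = null;
  }
  if (client.run) {
    client.run = null; // the real code breaks inner references (e.g. run.Graph) first
  }
}
```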
@@ -1,6 +1,7 @@
-const openIdClient = require('openid-client');
 const cookies = require('cookie');
 const jwt = require('jsonwebtoken');
+const openIdClient = require('openid-client');
+const { logger } = require('@librechat/data-schemas');
 const {
   registerUser,
   resetPassword,
@@ -8,9 +9,8 @@ const {
   requestPasswordReset,
   setOpenIDAuthTokens,
 } = require('~/server/services/AuthService');
-const { findSession, getUserById, deleteAllUserSessions, findUser } = require('~/models');
+const { findUser, getUserById, deleteAllUserSessions, findSession } = require('~/models');
 const { getOpenIdConfig } = require('~/strategies');
-const { logger } = require('~/config');
 const { isEnabled } = require('~/server/utils');
 
 const registrationController = async (req, res) => {
@@ -96,7 +96,10 @@ const refreshController = async (req, res) => {
   }
 
   // Find the session with the hashed refresh token
-  const session = await findSession({ userId: userId, refreshToken: refreshToken });
+  const session = await findSession({
+    userId: userId,
+    refreshToken: refreshToken,
+  });
 
   if (session && session.expiration > new Date()) {
     const token = await setAuthTokens(userId, res, session._id);
@@ -1,4 +1,4 @@
-const Balance = require('~/models/Balance');
+const { Balance } = require('~/db/models');
 
 async function balanceController(req, res) {
   const balanceData = await Balance.findOne(
437 api/server/controllers/PermissionsController.js Normal file
@@ -0,0 +1,437 @@
+/**
+ * @import { TUpdateResourcePermissionsRequest, TUpdateResourcePermissionsResponse } from 'librechat-data-provider'
+ */
+
+const mongoose = require('mongoose');
+const { logger } = require('@librechat/data-schemas');
+const {
+  getAvailableRoles,
+  ensurePrincipalExists,
+  getEffectivePermissions,
+  ensureGroupPrincipalExists,
+  bulkUpdateResourcePermissions,
+} = require('~/server/services/PermissionService');
+const { AclEntry } = require('~/db/models');
+const {
+  searchPrincipals: searchLocalPrincipals,
+  sortPrincipalsByRelevance,
+  calculateRelevanceScore,
+} = require('~/models');
+const {
+  searchEntraIdPrincipals,
+  entraIdPrincipalFeatureEnabled,
+} = require('~/server/services/GraphApiService');
+
+/**
+ * Generic controller for resource permission endpoints
+ * Delegates validation and logic to PermissionService
+ */
+
+/**
+ * Bulk update permissions for a resource (grant, update, remove)
+ * @route PUT /api/{resourceType}/{resourceId}/permissions
+ * @param {Object} req - Express request object
+ * @param {Object} req.params - Route parameters
+ * @param {string} req.params.resourceType - Resource type (e.g., 'agent')
+ * @param {string} req.params.resourceId - Resource ID
+ * @param {TUpdateResourcePermissionsRequest} req.body - Request body
+ * @param {Object} res - Express response object
+ * @returns {Promise<TUpdateResourcePermissionsResponse>} Updated permissions response
+ */
+const updateResourcePermissions = async (req, res) => {
+  try {
+    const { resourceType, resourceId } = req.params;
+    /** @type {TUpdateResourcePermissionsRequest} */
+    const { updated, removed, public: isPublic, publicAccessRoleId } = req.body;
+    const { id: userId } = req.user;
+
+    // Prepare principals for the service call
+    const updatedPrincipals = [];
+    const revokedPrincipals = [];
+
+    // Add updated principals
+    if (updated && Array.isArray(updated)) {
+      updatedPrincipals.push(...updated);
+    }
+
+    // Add public permission if enabled
+    if (isPublic && publicAccessRoleId) {
+      updatedPrincipals.push({
+        type: 'public',
+        id: null,
+        accessRoleId: publicAccessRoleId,
+      });
+    }
+
+    // Prepare authentication context for enhanced group member fetching
+    const useEntraId = entraIdPrincipalFeatureEnabled(req.user);
+    const authHeader = req.headers.authorization;
+    const accessToken =
+      authHeader && authHeader.startsWith('Bearer ') ? authHeader.substring(7) : null;
+    const authContext =
+      useEntraId && accessToken
+        ? {
+            accessToken,
+            sub: req.user.openidId,
+          }
+        : null;
+
+    // Ensure updated principals exist in the database before processing permissions
+    const validatedPrincipals = [];
+    for (const principal of updatedPrincipals) {
+      try {
+        let principalId;
+
+        if (principal.type === 'public') {
+          principalId = null; // Public principals don't need database records
+        } else if (principal.type === 'user') {
+          principalId = await ensurePrincipalExists(principal);
+        } else if (principal.type === 'group') {
+          // Pass authContext to enable member fetching for Entra ID groups when available
+          principalId = await ensureGroupPrincipalExists(principal, authContext);
+        } else {
+          logger.error(`Unsupported principal type: ${principal.type}`);
+          continue; // Skip invalid principal types
+        }
+
+        // Update the principal with the validated ID for ACL operations
+        validatedPrincipals.push({
+          ...principal,
+          id: principalId,
+        });
+      } catch (error) {
+        logger.error('Error ensuring principal exists:', {
+          principal: {
+            type: principal.type,
+            id: principal.id,
+            name: principal.name,
+            source: principal.source,
+          },
+          error: error.message,
+        });
+        // Continue with other principals instead of failing the entire operation
+        continue;
+      }
+    }
+
+    // Add removed principals
+    if (removed && Array.isArray(removed)) {
+      revokedPrincipals.push(...removed);
+    }
+
+    // If public is disabled, add public to revoked list
+    if (!isPublic) {
+      revokedPrincipals.push({
+        type: 'public',
+        id: null,
+      });
+    }
+
+    const results = await bulkUpdateResourcePermissions({
+      resourceType,
+      resourceId,
+      updatedPrincipals: validatedPrincipals,
+      revokedPrincipals,
+      grantedBy: userId,
+    });
+
+    /** @type {TUpdateResourcePermissionsResponse} */
+    const response = {
+      message: 'Permissions updated successfully',
+      results: {
+        principals: results.granted,
+        public: isPublic || false,
+        publicAccessRoleId: isPublic ? publicAccessRoleId : undefined,
+      },
+    };
+
+    res.status(200).json(response);
+  } catch (error) {
+    logger.error('Error updating resource permissions:', error);
+    res.status(400).json({
+      error: 'Failed to update permissions',
+      details: error.message,
+    });
+  }
+};
+
+/**
+ * Get principals with their permission roles for a resource (UI-friendly format)
+ * Uses efficient aggregation pipeline to join User/Group data in single query
+ * @route GET /api/permissions/{resourceType}/{resourceId}
+ */
+const getResourcePermissions = async (req, res) => {
+  try {
+    const { resourceType, resourceId } = req.params;
+
+    // Use aggregation pipeline for efficient single-query data retrieval
+    const results = await AclEntry.aggregate([
+      // Match ACL entries for this resource
+      {
+        $match: {
+          resourceType,
+          resourceId: mongoose.Types.ObjectId.isValid(resourceId)
+            ? mongoose.Types.ObjectId.createFromHexString(resourceId)
+            : resourceId,
+        },
+      },
+      // Lookup AccessRole information
+      {
+        $lookup: {
+          from: 'accessroles',
+          localField: 'roleId',
+          foreignField: '_id',
+          as: 'role',
+        },
+      },
+      // Lookup User information (for user principals)
+      {
+        $lookup: {
+          from: 'users',
+          localField: 'principalId',
+          foreignField: '_id',
+          as: 'userInfo',
+        },
+      },
+      // Lookup Group information (for group principals)
+      {
+        $lookup: {
+          from: 'groups',
+          localField: 'principalId',
+          foreignField: '_id',
+          as: 'groupInfo',
+        },
+      },
+      // Project final structure
+      {
+        $project: {
+          principalType: 1,
+          principalId: 1,
+          accessRoleId: { $arrayElemAt: ['$role.accessRoleId', 0] },
+          userInfo: { $arrayElemAt: ['$userInfo', 0] },
+          groupInfo: { $arrayElemAt: ['$groupInfo', 0] },
+        },
+      },
+    ]);
+
+    const principals = [];
+    let publicPermission = null;
+
+    // Process aggregation results
+    for (const result of results) {
+      if (result.principalType === 'public') {
+        publicPermission = {
+          public: true,
+          publicAccessRoleId: result.accessRoleId,
+        };
+      } else if (result.principalType === 'user' && result.userInfo) {
+        principals.push({
+          type: 'user',
+          id: result.userInfo._id.toString(),
+          name: result.userInfo.name || result.userInfo.username,
+          email: result.userInfo.email,
+          avatar: result.userInfo.avatar,
+          source: !result.userInfo._id ? 'entra' : 'local',
+          idOnTheSource: result.userInfo.idOnTheSource || result.userInfo._id.toString(),
+          accessRoleId: result.accessRoleId,
+        });
+      } else if (result.principalType === 'group' && result.groupInfo) {
+        principals.push({
+          type: 'group',
+          id: result.groupInfo._id.toString(),
+          name: result.groupInfo.name,
+          email: result.groupInfo.email,
+          description: result.groupInfo.description,
+          avatar: result.groupInfo.avatar,
+          source: result.groupInfo.source || 'local',
+          idOnTheSource: result.groupInfo.idOnTheSource || result.groupInfo._id.toString(),
+          accessRoleId: result.accessRoleId,
+        });
+      }
+    }
+
+    // Return response in format expected by frontend
+    const response = {
+      resourceType,
+      resourceId,
+      principals,
+      public: publicPermission?.public || false,
+      ...(publicPermission?.publicAccessRoleId && {
+        publicAccessRoleId: publicPermission.publicAccessRoleId,
+      }),
+    };
+
+    res.status(200).json(response);
+  } catch (error) {
+    logger.error('Error getting resource permissions principals:', error);
+    res.status(500).json({
+      error: 'Failed to get permissions principals',
+      details: error.message,
+    });
+  }
+};
+
+/**
+ * Get available roles for a resource type
+ * @route GET /api/{resourceType}/roles
+ */
+const getResourceRoles = async (req, res) => {
+  try {
+    const { resourceType } = req.params;
+
+    const roles = await getAvailableRoles({ resourceType });
+
+    res.status(200).json(
+      roles.map((role) => ({
+        accessRoleId: role.accessRoleId,
+        name: role.name,
+        description: role.description,
+        permBits: role.permBits,
+      })),
+    );
+  } catch (error) {
+    logger.error('Error getting resource roles:', error);
+    res.status(500).json({
+      error: 'Failed to get roles',
+      details: error.message,
+    });
+  }
+};
+
+/**
+ * Get user's effective permission bitmask for a resource
+ * @route GET /api/{resourceType}/{resourceId}/effective
+ */
+const getUserEffectivePermissions = async (req, res) => {
+  try {
+    const { resourceType, resourceId } = req.params;
+    const { id: userId } = req.user;
+
+    const permissionBits = await getEffectivePermissions({
+      userId,
+      resourceType,
+      resourceId,
+    });
+
+    res.status(200).json({
+      permissionBits,
+    });
+  } catch (error) {
+    logger.error('Error getting user effective permissions:', error);
+    res.status(500).json({
+      error: 'Failed to get effective permissions',
+      details: error.message,
+    });
+  }
+};
+
+/**
+ * Search for users and groups to grant permissions
+ * Supports hybrid local database + Entra ID search when configured
+ * @route GET /api/permissions/search-principals
+ */
+const searchPrincipals = async (req, res) => {
+  try {
+    const { q: query, limit = 20, type } = req.query;
+
+    if (!query || query.trim().length === 0) {
+      return res.status(400).json({
+        error: 'Query parameter "q" is required and must not be empty',
+      });
+    }
+
+    if (query.trim().length < 2) {
+      return res.status(400).json({
+        error: 'Query must be at least 2 characters long',
+      });
+    }
+
+    const searchLimit = Math.min(Math.max(1, parseInt(limit) || 10), 50);
+    const typeFilter = ['user', 'group'].includes(type) ? type : null;
+
+    const localResults = await searchLocalPrincipals(query.trim(), searchLimit, typeFilter);
+    let allPrincipals = [...localResults];
+
+    const useEntraId = entraIdPrincipalFeatureEnabled(req.user);
+
+    if (useEntraId && localResults.length < searchLimit) {
+      try {
+        const graphTypeMap = {
+          user: 'users',
+          group: 'groups',
+          null: 'all',
+        };
+
+        const authHeader = req.headers.authorization;
+        const accessToken =
+          authHeader && authHeader.startsWith('Bearer ') ? authHeader.substring(7) : null;
+
+        if (accessToken) {
+          const graphResults = await searchEntraIdPrincipals(
+            accessToken,
+            req.user.openidId,
+            query.trim(),
+            graphTypeMap[typeFilter],
+            searchLimit - localResults.length,
+          );
+
+          const localEmails = new Set(
+            localResults.map((p) => p.email?.toLowerCase()).filter(Boolean),
+          );
+          const localGroupSourceIds = new Set(
+            localResults.map((p) => p.idOnTheSource).filter(Boolean),
+          );
+
+          for (const principal of graphResults) {
+            const isDuplicateByEmail =
+              principal.email && localEmails.has(principal.email.toLowerCase());
+            const isDuplicateBySourceId =
+              principal.idOnTheSource && localGroupSourceIds.has(principal.idOnTheSource);
+
+            if (!isDuplicateByEmail && !isDuplicateBySourceId) {
+              allPrincipals.push(principal);
+            }
+          }
+        }
+      } catch (graphError) {
+        logger.warn('Graph API search failed, falling back to local results:', graphError.message);
+      }
+    }
+    const scoredResults = allPrincipals.map((item) => ({
+      ...item,
+      _searchScore: calculateRelevanceScore(item, query.trim()),
+    }));
+
+    allPrincipals = sortPrincipalsByRelevance(scoredResults)
+      .slice(0, searchLimit)
+      .map((result) => {
+        const { _searchScore, ...resultWithoutScore } = result;
+        return resultWithoutScore;
+      });
+    res.status(200).json({
+      query: query.trim(),
+      limit: searchLimit,
+      type: typeFilter,
+      results: allPrincipals,
+      count: allPrincipals.length,
+      sources: {
+        local: allPrincipals.filter((r) => r.source === 'local').length,
+        entra: allPrincipals.filter((r) => r.source === 'entra').length,
+      },
+    });
+  } catch (error) {
+    logger.error('Error searching principals:', error);
+    res.status(500).json({
+      error: 'Failed to search principals',
+      details: error.message,
+    });
+  }
+};
+
+module.exports = {
+  updateResourcePermissions,
+  getResourcePermissions,
+  getResourceRoles,
+  getUserEffectivePermissions,
+  searchPrincipals,
+};
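
The new controller above defines five endpoints; the most involved is the bulk update. As a rough sketch of how a client could exercise it — the request shape is inferred from the `req.body` destructuring above, while the mount path, resource ID, role IDs, and principal IDs are hypothetical placeholders, not values from this changeset:

```js
// Hypothetical client call for PUT /api/{resourceType}/{resourceId}/permissions.
const body = {
  updated: [
    // Local user principal, granted/updated with the given access role.
    { type: 'user', id: '665f00000000000000000001', accessRoleId: 'agent_editor' },
    // Entra ID group principal: ensureGroupPrincipalExists creates a local record
    // (and can fetch members when a Bearer token is forwarded as authContext).
    {
      type: 'group',
      id: null,
      source: 'entra',
      idOnTheSource: '00000000-0000-0000-0000-000000000000',
      accessRoleId: 'agent_viewer',
    },
  ],
  removed: [{ type: 'user', id: '665f00000000000000000002' }],
  public: true,
  publicAccessRoleId: 'agent_viewer',
};

const res = await fetch(`/api/agents/${agentId}/permissions`, {
  method: 'PUT',
  headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${accessToken}` },
  body: JSON.stringify(body),
});
// On success: { message: 'Permissions updated successfully', results: { principals, public, publicAccessRoleId } }
```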
@@ -1,9 +1,11 @@
+const { logger } = require('@librechat/data-schemas');
 const { CacheKeys, AuthType } = require('librechat-data-provider');
+const { getCustomConfig, getCachedTools } = require('~/server/services/Config');
 const { getToolkitKey } = require('~/server/services/ToolService');
-const { getCustomConfig } = require('~/server/services/Config');
+const { getMCPManager, getFlowStateManager } = require('~/config');
 const { availableTools } = require('~/app/clients/tools');
-const { getMCPManager } = require('~/config');
 const { getLogStores } = require('~/cache');
+const { Constants } = require('librechat-data-provider');
 
 /**
  * Filters out duplicate plugins from the list of plugins.
@@ -84,6 +86,45 @@ const getAvailablePluginsController = async (req, res) => {
   }
 };
 
+function createServerToolsCallback() {
+  /**
+   * @param {string} serverName
+   * @param {TPlugin[] | null} serverTools
+   */
+  return async function (serverName, serverTools) {
+    try {
+      const mcpToolsCache = getLogStores(CacheKeys.MCP_TOOLS);
+      if (!serverName || !mcpToolsCache) {
+        return;
+      }
+      await mcpToolsCache.set(serverName, serverTools);
+      logger.debug(`MCP tools for ${serverName} added to cache.`);
+    } catch (error) {
+      logger.error('Error retrieving MCP tools from cache:', error);
+    }
+  };
+}
+
+function createGetServerTools() {
+  /**
+   * Retrieves cached server tools
+   * @param {string} serverName
+   * @returns {Promise<TPlugin[] | null>}
+   */
+  return async function (serverName) {
+    try {
+      const mcpToolsCache = getLogStores(CacheKeys.MCP_TOOLS);
+      if (!mcpToolsCache) {
+        return null;
+      }
+      return await mcpToolsCache.get(serverName);
+    } catch (error) {
+      logger.error('Error retrieving MCP tools from cache:', error);
+      return null;
+    }
+  };
+}
+
 /**
  * Retrieves and returns a list of available tools, either from a cache or by reading a plugin manifest file.
  *
@@ -109,7 +150,16 @@ const getAvailableTools = async (req, res) => {
     const customConfig = await getCustomConfig();
     if (customConfig?.mcpServers != null) {
       const mcpManager = getMCPManager();
-      pluginManifest = await mcpManager.loadManifestTools(pluginManifest);
+      const flowsCache = getLogStores(CacheKeys.FLOWS);
+      const flowManager = flowsCache ? getFlowStateManager(flowsCache) : null;
+      const serverToolsCallback = createServerToolsCallback();
+      const getServerTools = createGetServerTools();
+      const mcpTools = await mcpManager.loadManifestTools({
+        flowManager,
+        serverToolsCallback,
+        getServerTools,
+      });
+      pluginManifest = [...mcpTools, ...pluginManifest];
     }
 
     /** @type {TPlugin[]} */
@@ -123,17 +173,57 @@ const getAvailableTools = async (req, res) => {
       }
     });
 
-    const toolDefinitions = req.app.locals.availableTools;
-    const tools = authenticatedPlugins.filter(
-      (plugin) =>
-        toolDefinitions[plugin.pluginKey] !== undefined ||
-        (plugin.toolkit === true &&
-          Object.keys(toolDefinitions).some((key) => getToolkitKey(key) === plugin.pluginKey)),
-    );
+    const toolDefinitions = await getCachedTools({ includeGlobal: true });
 
-    await cache.set(CacheKeys.TOOLS, tools);
-    res.status(200).json(tools);
+    const toolsOutput = [];
+    for (const plugin of authenticatedPlugins) {
+      const isToolDefined = toolDefinitions[plugin.pluginKey] !== undefined;
+      const isToolkit =
+        plugin.toolkit === true &&
+        Object.keys(toolDefinitions).some((key) => getToolkitKey(key) === plugin.pluginKey);
+
+      if (!isToolDefined && !isToolkit) {
+        continue;
+      }
+
+      const toolToAdd = { ...plugin };
+
+      if (!plugin.pluginKey.includes(Constants.mcp_delimiter)) {
+        toolsOutput.push(toolToAdd);
+        continue;
+      }
+
+      const parts = plugin.pluginKey.split(Constants.mcp_delimiter);
+      const serverName = parts[parts.length - 1];
+      const serverConfig = customConfig?.mcpServers?.[serverName];
+
+      if (!serverConfig?.customUserVars) {
+        toolsOutput.push(toolToAdd);
+        continue;
+      }
+
+      const customVarKeys = Object.keys(serverConfig.customUserVars);
+
+      if (customVarKeys.length === 0) {
+        toolToAdd.authConfig = [];
+        toolToAdd.authenticated = true;
+      } else {
+        toolToAdd.authConfig = Object.entries(serverConfig.customUserVars).map(([key, value]) => ({
+          authField: key,
+          label: value.title || key,
+          description: value.description || '',
+        }));
+        toolToAdd.authenticated = false;
+      }
+
+      toolsOutput.push(toolToAdd);
+    }
+
+    const finalTools = filterUniquePlugins(toolsOutput);
+    await cache.set(CacheKeys.TOOLS, finalTools);
+    res.status(200).json(finalTools);
   } catch (error) {
+    logger.error('[getAvailableTools]', error);
     res.status(500).json({ message: error.message });
   }
 };
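
The `customUserVars` branch above is what surfaces per-user credential fields for MCP tools in the UI. A minimal standalone sketch of that mapping, assuming a hypothetical server entry (the variable name, title, and description are illustrative, not from this changeset):

```js
// Hypothetical MCP server entry as it might appear under customConfig.mcpServers.
const serverConfig = {
  customUserVars: {
    API_KEY: { title: 'API Key', description: 'Key this MCP server should use' },
  },
};

// Mirror of the controller's mapping: each var becomes an authConfig field,
// and the tool is marked unauthenticated until the user saves values.
const authConfig = Object.entries(serverConfig.customUserVars).map(([key, value]) => ({
  authField: key,
  label: value.title || key,
  description: value.description || '',
}));
// => [{ authField: 'API_KEY', label: 'API Key', description: 'Key this MCP server should use' }]
```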
@@ -1,13 +1,13 @@
+const { encryptV3 } = require('@librechat/api');
+const { logger } = require('@librechat/data-schemas');
 const {
+  verifyTOTP,
+  getTOTPSecret,
+  verifyBackupCode,
   generateTOTPSecret,
   generateBackupCodes,
-  verifyTOTP,
-  verifyBackupCode,
-  getTOTPSecret,
 } = require('~/server/services/twoFactorService');
-const { updateUser, getUserById } = require('~/models');
-const { logger } = require('~/config');
-const { encryptV3 } = require('~/server/utils/crypto');
+const { getUserById, updateUser } = require('~/models');
 
 const safeAppTitle = (process.env.APP_TITLE || 'LibreChat').replace(/\s+/g, '');
 
@@ -5,8 +5,8 @@ const {
   webSearchKeys,
   extractWebSearchEnvVars,
 } = require('librechat-data-provider');
+const { logger } = require('@librechat/data-schemas');
 const {
-  Balance,
   getFiles,
   updateUser,
   deleteFiles,
@@ -16,16 +16,15 @@ const {
   deleteUserById,
   deleteAllUserSessions,
 } = require('~/models');
-const User = require('~/models/User');
 const { updateUserPluginAuth, deleteUserPluginAuth } = require('~/server/services/PluginService');
 const { updateUserPluginsService, deleteUserKey } = require('~/server/services/UserService');
 const { verifyEmail, resendVerificationEmail } = require('~/server/services/AuthService');
 const { needsRefresh, getNewS3URL } = require('~/server/services/Files/S3/crud');
 const { processDeleteRequest } = require('~/server/services/Files/process');
-const { deleteAllSharedLinks } = require('~/models/Share');
+const { Transaction, Balance, User } = require('~/db/models');
 const { deleteToolCalls } = require('~/models/ToolCall');
-const { Transaction } = require('~/models/Transaction');
-const { logger } = require('~/config');
+const { deleteAllSharedLinks } = require('~/models');
+const { getMCPManager } = require('~/config');
 
 const getUserController = async (req, res) => {
   /** @type {MongoUser} */
@@ -105,10 +104,22 @@ const updateUserPluginsController = async (req, res) => {
   }
 
   let keys = Object.keys(auth);
-  if (keys.length === 0 && pluginKey !== Tools.web_search) {
+  const values = Object.values(auth); // Used in 'install' block
+
+  const isMCPTool = pluginKey.startsWith('mcp_') || pluginKey.includes(Constants.mcp_delimiter);
+
+  // Early exit condition:
+  // If keys are empty (meaning auth: {} was likely sent for uninstall, or auth was empty for install)
+  // AND it's not web_search (which has special key handling to populate `keys` for uninstall)
+  // AND it's NOT (an uninstall action FOR an MCP tool - we need to proceed for this case to clear all its auth)
+  // THEN return.
+  if (
+    keys.length === 0 &&
+    pluginKey !== Tools.web_search &&
+    !(action === 'uninstall' && isMCPTool)
+  ) {
     return res.status(200).send();
   }
-  const values = Object.values(auth);
 
   /** @type {number} */
   let status = 200;
@@ -135,16 +146,53 @@ const updateUserPluginsController = async (req, res) => {
       }
     }
   } else if (action === 'uninstall') {
-    for (let i = 0; i < keys.length; i++) {
-      authService = await deleteUserPluginAuth(user.id, keys[i]);
+    // const isMCPTool was defined earlier
+    if (isMCPTool && keys.length === 0) {
+      // This handles the case where auth: {} is sent for an MCP tool uninstall.
+      // It means "delete all credentials associated with this MCP pluginKey".
+      authService = await deleteUserPluginAuth(user.id, null, true, pluginKey);
       if (authService instanceof Error) {
-        logger.error('[authService]', authService);
+        logger.error(
+          `[authService] Error deleting all auth for MCP tool ${pluginKey}:`,
+          authService,
+        );
         ({ status, message } = authService);
       }
+    } else {
+      // This handles:
+      // 1. Web_search uninstall (keys will be populated with all webSearchKeys if auth was {}).
+      // 2. Other tools uninstall (if keys were provided).
+      // 3. MCP tool uninstall if specific keys were provided in `auth` (not current frontend behavior).
+      // If keys is empty for non-MCP tools (and not web_search), this loop won't run, and nothing is deleted.
+      for (let i = 0; i < keys.length; i++) {
+        authService = await deleteUserPluginAuth(user.id, keys[i]); // Deletes by authField name
+        if (authService instanceof Error) {
+          logger.error('[authService] Error deleting specific auth key:', authService);
+          ({ status, message } = authService);
+        }
+      }
     }
   }
 
   if (status === 200) {
+    // If auth was updated successfully, disconnect MCP sessions as they might use these credentials
+    if (pluginKey.startsWith(Constants.mcp_prefix)) {
+      try {
+        const mcpManager = getMCPManager(user.id);
+        if (mcpManager) {
+          logger.info(
+            `[updateUserPluginsController] Disconnecting MCP connections for user ${user.id} after plugin auth update for ${pluginKey}.`,
+          );
+          await mcpManager.disconnectUserConnections(user.id);
+        }
+      } catch (disconnectError) {
+        logger.error(
+          `[updateUserPluginsController] Error disconnecting MCP connections for user ${user.id} after plugin auth update:`,
+          disconnectError,
+        );
+        // Do not fail the request for this, but log it.
+      }
+    }
     return res.status(status).send();
   }
 
@@ -166,7 +214,11 @@ const deleteUserController = async (req, res) => {
     await Balance.deleteMany({ user: user._id }); // delete user balances
     await deletePresets(user.id); // delete user presets
     /* TODO: Delete Assistant Threads */
-    await deleteConvos(user.id); // delete user convos
+    try {
+      await deleteConvos(user.id); // delete user convos
+    } catch (error) {
+      logger.error('[deleteUserController] Error deleting user convos, likely no convos', error);
+    }
     await deleteUserPluginAuth(user.id, null, true); // delete user plugin auth
     await deleteUserById(user.id); // delete user
     await deleteAllSharedLinks(user.id); // delete user shared links
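
To make the early-exit rules above concrete, here is a sketch of the payload shapes `updateUserPluginsController` now distinguishes (the plugin keys and field name are placeholders):

```js
// 1) MCP tool uninstall with empty auth: passes the early-exit check and clears
//    ALL stored credentials for the pluginKey via
//    deleteUserPluginAuth(user.id, null, true, pluginKey).
const uninstallAllMcpAuth = { pluginKey: 'mcp_example-server', action: 'uninstall', auth: {} };

// 2) Non-MCP tool with empty auth (and not web_search): returns 200 early;
//    nothing is deleted.
const earlyExit = { pluginKey: 'some-plugin', action: 'uninstall', auth: {} };

// 3) Uninstall with explicit keys: deletes each named authField one by one
//    in the else branch's loop.
const uninstallSpecific = {
  pluginKey: 'some-plugin',
  action: 'uninstall',
  auth: { SOME_API_KEY: '' },
};
```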
@@ -1,4 +1,6 @@
 const { nanoid } = require('nanoid');
+const { sendEvent } = require('@librechat/api');
+const { logger } = require('@librechat/data-schemas');
 const { Tools, StepTypes, FileContext } = require('librechat-data-provider');
 const {
   EnvVar,
@@ -12,7 +14,6 @@ const {
 const { processCodeOutput } = require('~/server/services/Files/Code/process');
 const { loadAuthValues } = require('~/server/services/Tools/credentials');
 const { saveBase64Image } = require('~/server/services/Files/process');
-const { logger, sendEvent } = require('~/config');
 
 class ModelEndHandler {
   /**
@@ -240,9 +241,7 @@ function createToolEndCallback({ req, res, artifactPromises }) {
       if (output.artifact[Tools.web_search]) {
         artifactPromises.push(
           (async () => {
-            const name = `${output.name}_${output.tool_call_id}_${nanoid()}`;
             const attachment = {
-              name,
               type: Tools.web_search,
               messageId: metadata.run_id,
               toolCallId: output.tool_call_id,
@@ -1,13 +1,12 @@
-// const { HttpsProxyAgent } = require('https-proxy-agent');
-// const {
-//   Constants,
-//   ImageDetail,
-//   EModelEndpoint,
-//   resolveHeaders,
-//   validateVisionModel,
-//   mapModelToAzureConfig,
-// } = require('librechat-data-provider');
 require('events').EventEmitter.defaultMaxListeners = 100;
+const { logger } = require('@librechat/data-schemas');
+const {
+  sendEvent,
+  createRun,
+  Tokenizer,
+  memoryInstructions,
+  createMemoryProcessor,
+} = require('@librechat/api');
 const {
   Callback,
   GraphEvents,
@@ -19,25 +18,34 @@ const {
 } = require('@librechat/agents');
 const {
   Constants,
+  Permissions,
   VisionModes,
   ContentTypes,
   EModelEndpoint,
   KnownEndpoints,
+  PermissionTypes,
   isAgentsEndpoint,
   AgentCapabilities,
   bedrockInputSchema,
   removeNullishValues,
 } = require('librechat-data-provider');
-const { getCustomEndpointConfig, checkCapability } = require('~/server/services/Config');
-const { addCacheControl, createContextHandlers } = require('~/app/clients/prompts');
-const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
+const { DynamicStructuredTool } = require('@langchain/core/tools');
 const { getBufferString, HumanMessage } = require('@langchain/core/messages');
+const {
+  getCustomEndpointConfig,
+  createGetMCPAuthMap,
+  checkCapability,
+} = require('~/server/services/Config');
+const { addCacheControl, createContextHandlers } = require('~/app/clients/prompts');
+const { initializeAgent } = require('~/server/services/Endpoints/agents/agent');
+const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
+const { getFormattedMemories, deleteMemory, setMemory } = require('~/models');
 const { encodeAndFormat } = require('~/server/services/Files/images/encode');
 const initOpenAI = require('~/server/services/Endpoints/openAI/initialize');
-const Tokenizer = require('~/server/services/Tokenizer');
+const { checkAccess } = require('~/server/middleware/roles/access');
 const BaseClient = require('~/app/clients/BaseClient');
-const { logger, sendEvent } = require('~/config');
-const { createRun } = require('./run');
+const { loadAgent } = require('~/models/Agent');
+const { getMCPManager } = require('~/config');
 
 /**
  * @param {ServerRequest} req
@@ -57,12 +65,8 @@ const legacyContentEndpoints = new Set([KnownEndpoints.groq, KnownEndpoints.deep
 
 const noSystemModelRegex = [/\b(o1-preview|o1-mini|amazon\.titan-text)\b/gi];
 
-// const { processMemory, memoryInstructions } = require('~/server/services/Endpoints/agents/memory');
-// const { getFormattedMemories } = require('~/models/Memory');
-// const { getCurrentDateTime } = require('~/utils');
 
 function createTokenCounter(encoding) {
-  return (message) => {
+  return function (message) {
     const countTokens = (text) => Tokenizer.getTokenCount(text, encoding);
     return getTokenCountForMessage(message, countTokens);
   };
@@ -123,6 +127,8 @@ class AgentClient extends BaseClient {
     this.usage;
     /** @type {Record<string, number>} */
     this.indexTokenCountMap = {};
+    /** @type {(messages: BaseMessage[]) => Promise<void>} */
+    this.processMemory;
   }
 
   /**
@@ -137,55 +143,10 @@ class AgentClient extends BaseClient {
   }
 
   /**
-   *
-   * Checks if the model is a vision model based on request attachments and sets the appropriate options:
-   * - Sets `this.modelOptions.model` to `gpt-4-vision-preview` if the request is a vision request.
-   * - Sets `this.isVisionModel` to `true` if vision request.
-   * - Deletes `this.modelOptions.stop` if vision request.
+   * `AgentClient` is not opinionated about vision requests, so we don't do anything here
    * @param {MongoFile[]} attachments
    */
-  checkVisionRequest(attachments) {
-    // if (!attachments) {
-    //   return;
-    // }
-    // const availableModels = this.options.modelsConfig?.[this.options.endpoint];
-    // if (!availableModels) {
-    //   return;
-    // }
-    // let visionRequestDetected = false;
-    // for (const file of attachments) {
-    //   if (file?.type?.includes('image')) {
-    //     visionRequestDetected = true;
-    //     break;
-    //   }
-    // }
-    // if (!visionRequestDetected) {
-    //   return;
-    // }
-    // this.isVisionModel = validateVisionModel({ model: this.modelOptions.model, availableModels });
-    // if (this.isVisionModel) {
-    //   delete this.modelOptions.stop;
-    //   return;
-    // }
-    // for (const model of availableModels) {
-    //   if (!validateVisionModel({ model, availableModels })) {
-    //     continue;
-    //   }
-    //   this.modelOptions.model = model;
-    //   this.isVisionModel = true;
-    //   delete this.modelOptions.stop;
-    //   return;
-    // }
-    // if (!availableModels.includes(this.defaultVisionModel)) {
-    //   return;
-    // }
-    // if (!validateVisionModel({ model: this.defaultVisionModel, availableModels })) {
-    //   return;
-    // }
-    // this.modelOptions.model = this.defaultVisionModel;
-    // this.isVisionModel = true;
-    // delete this.modelOptions.stop;
-  }
+  checkVisionRequest() {}
 
   getSaveOptions() {
     // TODO:
@@ -269,24 +230,6 @@ class AgentClient extends BaseClient {
       .filter(Boolean)
       .join('\n')
      .trim();
-    // this.systemMessage = getCurrentDateTime();
-    // const { withKeys, withoutKeys } = await getFormattedMemories({
-    //   userId: this.options.req.user.id,
-    // });
-    // processMemory({
-    //   userId: this.options.req.user.id,
-    //   message: this.options.req.body.text,
-    //   parentMessageId,
-    //   memory: withKeys,
-    //   thread_id: this.conversationId,
-    // }).catch((error) => {
-    //   logger.error('Memory Agent failed to process memory', error);
-    // });
-
-    // this.systemMessage += '\n\n' + memoryInstructions;
-    // if (withoutKeys) {
-    //   this.systemMessage += `\n\n# Existing memory about the user:\n${withoutKeys}`;
-    // }
 
     if (this.options.attachments) {
       const attachments = await this.options.attachments;
@@ -370,6 +313,37 @@ class AgentClient extends BaseClient {
       systemContent = this.augmentedPrompt + systemContent;
     }
 
+    // Inject MCP server instructions if available
+    const ephemeralAgent = this.options.req.body.ephemeralAgent;
+    let mcpServers = [];
+
+    // Check for ephemeral agent MCP servers
+    if (ephemeralAgent && ephemeralAgent.mcp && ephemeralAgent.mcp.length > 0) {
+      mcpServers = ephemeralAgent.mcp;
+    }
+    // Check for regular agent MCP tools
+    else if (this.options.agent && this.options.agent.tools) {
+      mcpServers = this.options.agent.tools
+        .filter(
+          (tool) =>
+            tool instanceof DynamicStructuredTool && tool.name.includes(Constants.mcp_delimiter),
+        )
+        .map((tool) => tool.name.split(Constants.mcp_delimiter).pop())
+        .filter(Boolean);
+    }
+
+    if (mcpServers.length > 0) {
+      try {
+        const mcpInstructions = getMCPManager().formatInstructionsForContext(mcpServers);
+        if (mcpInstructions) {
+          systemContent = [systemContent, mcpInstructions].filter(Boolean).join('\n\n');
+          logger.debug('[AgentClient] Injected MCP instructions for servers:', mcpServers);
+        }
+      } catch (error) {
+        logger.error('[AgentClient] Failed to inject MCP instructions:', error);
+      }
+    }
+
     if (systemContent) {
       this.options.agent.instructions = systemContent;
     }
@@ -399,9 +373,150 @@ class AgentClient extends BaseClient {
       opts.getReqData({ promptTokens });
     }
 
+    const withoutKeys = await this.useMemory();
+    if (withoutKeys) {
+      systemContent += `${memoryInstructions}\n\n# Existing memory about the user:\n${withoutKeys}`;
+    }
+
+    if (systemContent) {
+      this.options.agent.instructions = systemContent;
+    }
+
     return result;
   }
 
+  /**
+   * @returns {Promise<string | undefined>}
+   */
+  async useMemory() {
+    const user = this.options.req.user;
+    if (user.personalization?.memories === false) {
+      return;
+    }
+    const hasAccess = await checkAccess(user, PermissionTypes.MEMORIES, [Permissions.USE]);
+
+    if (!hasAccess) {
+      logger.debug(
+        `[api/server/controllers/agents/client.js #useMemory] User ${user.id} does not have USE permission for memories`,
+      );
+      return;
+    }
+    /** @type {TCustomConfig['memory']} */
+    const memoryConfig = this.options.req?.app?.locals?.memory;
+    if (!memoryConfig || memoryConfig.disabled === true) {
+      return;
+    }
+
+    /** @type {Agent} */
+    let prelimAgent;
+    const allowedProviders = new Set(
+      this.options.req?.app?.locals?.[EModelEndpoint.agents]?.allowedProviders,
+    );
+    try {
+      if (memoryConfig.agent?.id != null && memoryConfig.agent.id !== this.options.agent.id) {
+        prelimAgent = await loadAgent({
+          req: this.options.req,
+          agent_id: memoryConfig.agent.id,
+          endpoint: EModelEndpoint.agents,
+        });
+      } else if (
+        memoryConfig.agent?.id == null &&
+        memoryConfig.agent?.model != null &&
+        memoryConfig.agent?.provider != null
+      ) {
+        prelimAgent = { id: Constants.EPHEMERAL_AGENT_ID, ...memoryConfig.agent };
+      }
+    } catch (error) {
+      logger.error(
+        '[api/server/controllers/agents/client.js #useMemory] Error loading agent for memory',
+        error,
+      );
+    }
+
+    const agent = await initializeAgent({
+      req: this.options.req,
+      res: this.options.res,
+      agent: prelimAgent,
+      allowedProviders,
+    });
+
+    if (!agent) {
+      logger.warn(
+        '[api/server/controllers/agents/client.js #useMemory] No agent found for memory',
+        memoryConfig,
+      );
+      return;
+    }
+
+    const llmConfig = Object.assign(
+      {
+        provider: agent.provider,
+        model: agent.model,
+      },
+      agent.model_parameters,
+    );
+
+    /** @type {import('@librechat/api').MemoryConfig} */
+    const config = {
+      validKeys: memoryConfig.validKeys,
+      instructions: agent.instructions,
+      llmConfig,
+      tokenLimit: memoryConfig.tokenLimit,
+    };
+
+    const userId = this.options.req.user.id + '';
+    const messageId = this.responseMessageId + '';
+    const conversationId = this.conversationId + '';
+    const [withoutKeys, processMemory] = await createMemoryProcessor({
+      userId,
+      config,
+      messageId,
+      conversationId,
+      memoryMethods: {
+        setMemory,
+        deleteMemory,
+        getFormattedMemories,
+      },
+      res: this.options.res,
+    });
+
+    this.processMemory = processMemory;
+    return withoutKeys;
+  }
+
+  /**
+   * @param {BaseMessage[]} messages
+   * @returns {Promise<void | (TAttachment | null)[]>}
+   */
+  async runMemory(messages) {
+    try {
+      if (this.processMemory == null) {
+        return;
+      }
+      /** @type {TCustomConfig['memory']} */
+      const memoryConfig = this.options.req?.app?.locals?.memory;
+      const messageWindowSize = memoryConfig?.messageWindowSize ?? 5;
+
+      let messagesToProcess = [...messages];
+      if (messages.length > messageWindowSize) {
+        for (let i = messages.length - messageWindowSize; i >= 0; i--) {
+          const potentialWindow = messages.slice(i, i + messageWindowSize);
+          if (potentialWindow[0]?.role === 'user') {
+            messagesToProcess = [...potentialWindow];
+            break;
+          }
+        }
+
+        if (messagesToProcess.length === messages.length) {
+          messagesToProcess = [...messages.slice(-messageWindowSize)];
+        }
+      }
+      return await this.processMemory(messagesToProcess);
+    } catch (error) {
+      logger.error('Memory Agent failed to process memory', error);
+    }
+  }
+
   /** @type {sendCompletion} */
   async sendCompletion(payload, opts = {}) {
     await this.chatCompletion({
@@ -544,100 +659,13 @@ class AgentClient extends BaseClient {
     let config;
     /** @type {ReturnType<createRun>} */
     let run;
+    /** @type {Promise<(TAttachment | null)[] | undefined>} */
+    let memoryPromise;
    try {
       if (!abortController) {
         abortController = new AbortController();
       }
 
-      // if (this.options.headers) {
-      //   opts.defaultHeaders = { ...opts.defaultHeaders, ...this.options.headers };
-      // }
-
-      // if (this.options.proxy) {
-      //   opts.httpAgent = new HttpsProxyAgent(this.options.proxy);
-      // }
-
-      // if (this.isVisionModel) {
-      //   modelOptions.max_tokens = 4000;
-      // }
-
-      // /** @type {TAzureConfig | undefined} */
-      // const azureConfig = this.options?.req?.app?.locals?.[EModelEndpoint.azureOpenAI];
-
-      // if (
-      //   (this.azure && this.isVisionModel && azureConfig) ||
-      //   (azureConfig && this.isVisionModel && this.options.endpoint === EModelEndpoint.azureOpenAI)
-      // ) {
-      //   const { modelGroupMap, groupMap } = azureConfig;
-      //   const {
-      //     azureOptions,
-      //     baseURL,
-      //     headers = {},
-      //     serverless,
-      //   } = mapModelToAzureConfig({
-      //     modelName: modelOptions.model,
-      //     modelGroupMap,
-      //     groupMap,
-      //   });
-      //   opts.defaultHeaders = resolveHeaders(headers);
-      //   this.langchainProxy = extractBaseURL(baseURL);
-      //   this.apiKey = azureOptions.azureOpenAIApiKey;
-
-      //   const groupName = modelGroupMap[modelOptions.model].group;
-      //   this.options.addParams = azureConfig.groupMap[groupName].addParams;
-      //   this.options.dropParams = azureConfig.groupMap[groupName].dropParams;
-      //   // Note: `forcePrompt` not re-assigned as only chat models are vision models
-
-      //   this.azure = !serverless && azureOptions;
-      //   this.azureEndpoint =
-      //     !serverless && genAzureChatCompletion(this.azure, modelOptions.model, this);
-      // }
-
-      // if (this.azure || this.options.azure) {
-      //   /* Azure Bug, extremely short default `max_tokens` response */
-      //   if (!modelOptions.max_tokens && modelOptions.model === 'gpt-4-vision-preview') {
-      //     modelOptions.max_tokens = 4000;
-      //   }
-
-      //   /* Azure does not accept `model` in the body, so we need to remove it. */
-      //   delete modelOptions.model;
-
-      //   opts.baseURL = this.langchainProxy
-      //     ? constructAzureURL({
-      //         baseURL: this.langchainProxy,
-      //         azureOptions: this.azure,
-      //       })
-      //     : this.azureEndpoint.split(/(?<!\/)\/(chat|completion)\//)[0];
-
-      //   opts.defaultQuery = { 'api-version': this.azure.azureOpenAIApiVersion };
-      //   opts.defaultHeaders = { ...opts.defaultHeaders, 'api-key': this.apiKey };
-      // }
-
-      // if (process.env.OPENAI_ORGANIZATION) {
-      //   opts.organization = process.env.OPENAI_ORGANIZATION;
-      // }
-
-      // if (this.options.addParams && typeof this.options.addParams === 'object') {
-      //   modelOptions = {
-      //     ...modelOptions,
-      //     ...this.options.addParams,
-      //   };
-      //   logger.debug('[api/server/controllers/agents/client.js #chatCompletion] added params', {
-      //     addParams: this.options.addParams,
-      //     modelOptions,
-      //   });
-      // }
-
-      // if (this.options.dropParams && Array.isArray(this.options.dropParams)) {
-      //   this.options.dropParams.forEach((param) => {
-      //     delete modelOptions[param];
-      //   });
-      //   logger.debug('[api/server/controllers/agents/client.js #chatCompletion] dropped params', {
-      //     dropParams: this.options.dropParams,
-      //     modelOptions,
-      //   });
-      // }
-
       /** @type {TCustomConfig['endpoints']['agents']} */
       const agentsEConfig = this.options.req.app.locals[EModelEndpoint.agents];
 
@@ -647,6 +675,7 @@ class AgentClient extends BaseClient {
           last_agent_index: this.agentConfigs?.size ?? 0,
           user_id: this.user ?? this.options.req.user?.id,
           hide_sequential_outputs: this.options.agent.hide_sequential_outputs,
+          user: this.options.req.user,
         },
         recursionLimit: agentsEConfig?.recursionLimit,
         signal: abortController.signal,
@@ -654,6 +683,8 @@ class AgentClient extends BaseClient {
         version: 'v2',
       };
 
+      const getUserMCPAuthMap = await createGetMCPAuthMap();
+
      const toolSet = new Set((this.options.agent.tools ?? []).map((tool) => tool && tool.name));
       let { messages: initialMessages, indexTokenCountMap } = formatAgentMessages(
         payload,
@@ -734,6 +765,10 @@ class AgentClient extends BaseClient {
           messages = addCacheControl(messages);
         }
 
+        if (i === 0) {
+          memoryPromise = this.runMemory(messages);
+        }
+
         run = await createRun({
           agent,
           req: this.options.req,
@@ -769,10 +804,23 @@ class AgentClient extends BaseClient {
           run.Graph.contentData = contentData;
         }
 
-        const encoding = this.getEncoding();
+        try {
+          if (getUserMCPAuthMap) {
+            config.configurable.userMCPAuthMap = await getUserMCPAuthMap({
+              tools: agent.tools,
+              userId: this.options.req.user.id,
+            });
+          }
+        } catch (err) {
+          logger.error(
+            `[api/server/controllers/agents/client.js #chatCompletion] Error getting custom user vars for agent ${agent.id}`,
+            err,
+          );
+        }
+
         await run.processStream({ messages }, config, {
           keepContent: i !== 0,
-          tokenCounter: createTokenCounter(encoding),
+          tokenCounter: createTokenCounter(this.getEncoding()),
           indexTokenCountMap: currentIndexCountMap,
           maxContextTokens: agent.maxContextTokens,
           callbacks: {
@@ -887,6 +935,12 @@ class AgentClient extends BaseClient {
       });
 
       try {
+        if (memoryPromise) {
+          const attachments = await memoryPromise;
+          if (attachments && attachments.length > 0) {
+            this.artifactPromises.push(...attachments);
+          }
+        }
         await this.recordCollectedUsage({ context: 'message' });
       } catch (err) {
         logger.error(
@@ -895,6 +949,12 @@ class AgentClient extends BaseClient {
         );
       }
     } catch (err) {
+      if (memoryPromise) {
+        const attachments = await memoryPromise;
+        if (attachments && attachments.length > 0) {
+          this.artifactPromises.push(...attachments);
+        }
+      }
       logger.error(
         '[api/server/controllers/agents/client.js #sendCompletion] Operation aborted',
         err,
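
One detail worth calling out from the new `runMemory` method: the memory window is not simply the last N messages; it prefers the most recent window that begins on a user turn. A standalone sketch of that selection logic (the same algorithm as above, extracted for illustration; messages are assumed to be `{ role, content }` objects):

```js
// Extracted sketch of runMemory's window selection; windowSize defaults to 5,
// matching memoryConfig.messageWindowSize ?? 5 above.
function selectMemoryWindow(messages, windowSize = 5) {
  if (messages.length <= windowSize) {
    return [...messages];
  }
  // Walk backwards looking for the latest window that starts on a 'user' turn.
  for (let i = messages.length - windowSize; i >= 0; i--) {
    const window = messages.slice(i, i + windowSize);
    if (window[0]?.role === 'user') {
      return window;
    }
  }
  // Fallback: plain tail of the conversation.
  return messages.slice(-windowSize);
}
```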
@@ -228,7 +228,7 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
     // Save user message if needed
     if (!client.skipSaveUserMessage) {
       await saveMessage(req, userMessage, {
-        context: 'api/server/controllers/agents/request.js - don\'t skip saving user message',
+        context: "api/server/controllers/agents/request.js - don't skip saving user message",
       });
     }
 
Deleted file (94 lines): the createRun helper.

@@ -1,94 +0,0 @@
-const { Run, Providers } = require('@librechat/agents');
-const { providerEndpointMap, KnownEndpoints } = require('librechat-data-provider');
-
-/**
- * @typedef {import('@librechat/agents').t} t
- * @typedef {import('@librechat/agents').StandardGraphConfig} StandardGraphConfig
- * @typedef {import('@librechat/agents').StreamEventData} StreamEventData
- * @typedef {import('@librechat/agents').EventHandler} EventHandler
- * @typedef {import('@librechat/agents').GraphEvents} GraphEvents
- * @typedef {import('@librechat/agents').LLMConfig} LLMConfig
- * @typedef {import('@librechat/agents').IState} IState
- */
-
-const customProviders = new Set([
-  Providers.XAI,
-  Providers.OLLAMA,
-  Providers.DEEPSEEK,
-  Providers.OPENROUTER,
-]);
-
-/**
- * Creates a new Run instance with custom handlers and configuration.
- *
- * @param {Object} options - The options for creating the Run instance.
- * @param {ServerRequest} [options.req] - The server request.
- * @param {string | undefined} [options.runId] - Optional run ID; otherwise, a new run ID will be generated.
- * @param {Agent} options.agent - The agent for this run.
- * @param {AbortSignal} options.signal - The signal for this run.
- * @param {Record<GraphEvents, EventHandler> | undefined} [options.customHandlers] - Custom event handlers.
- * @param {boolean} [options.streaming=true] - Whether to use streaming.
- * @param {boolean} [options.streamUsage=true] - Whether to stream usage information.
- * @returns {Promise<Run<IState>>} A promise that resolves to a new Run instance.
- */
-async function createRun({
-  runId,
-  agent,
-  signal,
-  customHandlers,
-  streaming = true,
-  streamUsage = true,
-}) {
-  const provider = providerEndpointMap[agent.provider] ?? agent.provider;
-  /** @type {LLMConfig} */
-  const llmConfig = Object.assign(
-    {
-      provider,
-      streaming,
-      streamUsage,
-    },
-    agent.model_parameters,
-  );
-
-  /** Resolves issues with new OpenAI usage field */
-  if (
-    customProviders.has(agent.provider) ||
-    (agent.provider === Providers.OPENAI && agent.endpoint !== agent.provider)
-  ) {
-    llmConfig.streamUsage = false;
-    llmConfig.usage = true;
-  }
-
-  /** @type {'reasoning_content' | 'reasoning'} */
-  let reasoningKey;
-  if (
-    llmConfig.configuration?.baseURL?.includes(KnownEndpoints.openrouter) ||
-    (agent.endpoint && agent.endpoint.toLowerCase().includes(KnownEndpoints.openrouter))
-  ) {
-    reasoningKey = 'reasoning';
-  }
-
-  /** @type {StandardGraphConfig} */
-  const graphConfig = {
-    signal,
-    llmConfig,
-    reasoningKey,
-    tools: agent.tools,
-    instructions: agent.instructions,
-    additional_instructions: agent.additional_instructions,
-    // toolEnd: agent.end_after_tools,
-  };
-
-  // TEMPORARY FOR TESTING
-  if (agent.provider === Providers.ANTHROPIC || agent.provider === Providers.BEDROCK) {
-    graphConfig.streamBuffer = 2000;
-  }
-
-  return Run.create({
-    runId,
-    graphConfig,
-    customHandlers,
-  });
-}
-
-module.exports = { createRun };
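The removed createRun helper built its LLMConfig by merging base streaming flags with agent.model_parameters through Object.assign, which copies sources left to right so later keys win. A quick self-contained illustration (values invented):

    const base = { provider: 'openai', streaming: true, streamUsage: true };
    const model_parameters = { temperature: 0.2, streaming: false };
    // Object.assign mutates and returns its first argument; later sources override earlier keys.
    const llmConfig = Object.assign({}, base, model_parameters);
    console.log(llmConfig);
    // { provider: 'openai', streaming: false, streamUsage: true, temperature: 0.2 }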
@@ -1,11 +1,10 @@
 const fs = require('fs').promises;
 const { nanoid } = require('nanoid');
+const { logger, PermissionBits } = require('@librechat/data-schemas');
 const {
   Tools,
-  Constants,
-  FileContext,
-  FileSources,
   SystemRoles,
+  FileSources,
   EToolResources,
   actionDelimiter,
 } = require('librechat-data-provider');
@@ -14,17 +13,22 @@ const {
   createAgent,
   updateAgent,
   deleteAgent,
-  getListAgents,
+  getListAgentsByAccess,
 } = require('~/models/Agent');
-const { uploadImageBuffer, filterFile } = require('~/server/services/Files/process');
+const {
+  grantPermission,
+  findAccessibleResources,
+  findPubliclyAccessibleResources,
+  hasPublicPermission,
+} = require('~/server/services/PermissionService');
 const { getStrategyFunctions } = require('~/server/services/Files/strategies');
+const { resizeAvatar } = require('~/server/services/Files/images/avatar');
 const { refreshS3Url } = require('~/server/services/Files/S3/crud');
+const { filterFile } = require('~/server/services/Files/process');
 const { updateAction, getActions } = require('~/models/Action');
-const { updateAgentProjects } = require('~/models/Agent');
-const { getProjectByName } = require('~/models/Project');
-const { deleteFileByFilter } = require('~/models/File');
+const { getCachedTools } = require('~/server/services/Config');
 const { revertAgentVersion } = require('~/models/Agent');
-const { logger } = require('~/config');
+const { deleteFileByFilter } = require('~/models/File');

 const systemTools = {
   [Tools.execute_code]: true,
@@ -46,8 +50,9 @@ const createAgentHandler = async (req, res) => {

     agentData.tools = [];

+    const availableTools = await getCachedTools({ includeGlobal: true });
     for (const tool of tools) {
-      if (req.app.locals.availableTools[tool]) {
+      if (availableTools[tool]) {
         agentData.tools.push(tool);
       }

@@ -67,6 +72,27 @@ const createAgentHandler = async (req, res) => {

     agentData.id = `agent_${nanoid()}`;
     const agent = await createAgent(agentData);
+
+    // Automatically grant owner permissions to the creator
+    try {
+      await grantPermission({
+        principalType: 'user',
+        principalId: userId,
+        resourceType: 'agent',
+        resourceId: agent._id,
+        accessRoleId: 'agent_owner',
+        grantedBy: userId,
+      });
+      logger.debug(
+        `[createAgent] Granted owner permissions to user ${userId} for agent ${agent.id}`,
+      );
+    } catch (permissionError) {
+      logger.error(
+        `[createAgent] Failed to grant owner permissions for agent ${agent.id}:`,
+        permissionError,
+      );
+    }
+
     res.status(201).json(agent);
   } catch (error) {
     logger.error('[/Agents] Error creating agent', error);
@@ -85,21 +111,14 @@ const createAgentHandler = async (req, res) => {
  * @returns {Promise<Agent>} 200 - success response - application/json
  * @returns {Error} 404 - Agent not found
  */
-const getAgentHandler = async (req, res) => {
+const getAgentHandler = async (req, res, expandProperties = false) => {
   try {
     const id = req.params.id;
    const author = req.user.id;

-    let query = { id, author };
-
-    const globalProject = await getProjectByName(Constants.GLOBAL_PROJECT_NAME, ['agentIds']);
-    if (globalProject && (globalProject.agentIds?.length ?? 0) > 0) {
-      query = {
-        $or: [{ id, $in: globalProject.agentIds }, query],
-      };
-    }
-
-    const agent = await getAgent(query);
+    // Permissions are validated by middleware before calling this function
+    // Simply load the agent by ID
+    const agent = await getAgent({ id });

     if (!agent) {
       return res.status(404).json({ error: 'Agent not found' });
@@ -116,23 +135,45 @@ const getAgentHandler = async (req, res) => {
     }

     agent.author = agent.author.toString();

+    // @deprecated - isCollaborative replaced by ACL permissions
     agent.isCollaborative = !!agent.isCollaborative;

+    // Check if agent is public
+    const isPublic = await hasPublicPermission({
+      resourceType: 'agent',
+      resourceId: agent._id,
+      requiredPermissions: PermissionBits.VIEW,
+    });
+    agent.isPublic = isPublic;
+
     if (agent.author !== author) {
       delete agent.author;
     }

-    if (!agent.isCollaborative && agent.author !== author && req.user.role !== SystemRoles.ADMIN) {
+    if (!expandProperties) {
+      // VIEW permission: Basic agent info only
       return res.status(200).json({
+        _id: agent._id,
         id: agent.id,
         name: agent.name,
+        description: agent.description,
         avatar: agent.avatar,
         author: agent.author,
+        provider: agent.provider,
+        model: agent.model,
         projectIds: agent.projectIds,
+        // @deprecated - isCollaborative replaced by ACL permissions
         isCollaborative: agent.isCollaborative,
+        isPublic: agent.isPublic,
         version: agent.version,
+        // Safe metadata
+        createdAt: agent.createdAt,
+        updatedAt: agent.updatedAt,
       });
     }

+    // EDIT permission: Full agent details including sensitive configuration
     return res.status(200).json(agent);
   } catch (error) {
     logger.error('[/Agents/:id] Error retrieving agent', error);
@@ -152,36 +193,20 @@ const getAgentHandler = async (req, res) => {
 const updateAgentHandler = async (req, res) => {
   try {
     const id = req.params.id;
-    const { projectIds, removeProjectIds, ...updateData } = req.body;
-    const isAdmin = req.user.role === SystemRoles.ADMIN;
+    const { _id, ...updateData } = req.body;
     const existingAgent = await getAgent({ id });
-    const isAuthor = existingAgent.author.toString() === req.user.id;

     if (!existingAgent) {
       return res.status(404).json({ error: 'Agent not found' });
     }
-    const hasEditPermission = existingAgent.isCollaborative || isAdmin || isAuthor;
-
-    if (!hasEditPermission) {
-      return res.status(403).json({
-        error: 'You do not have permission to modify this non-collaborative agent',
-      });
-    }
-
     let updatedAgent =
       Object.keys(updateData).length > 0
-        ? await updateAgent({ id }, updateData, { updatingUserId: req.user.id })
+        ? await updateAgent({ id }, updateData, {
+            updatingUserId: req.user.id,
+          })
         : existingAgent;

-    if (projectIds || removeProjectIds) {
-      updatedAgent = await updateAgentProjects({
-        user: req.user,
-        agentId: id,
-        projectIds,
-        removeProjectIds,
-      });
-    }
-
     if (updatedAgent.author) {
       updatedAgent.author = updatedAgent.author.toString();
     }
@@ -299,6 +324,26 @@ const duplicateAgentHandler = async (req, res) => {
     newAgentData.actions = agentActions;
     const newAgent = await createAgent(newAgentData);
+
+    // Automatically grant owner permissions to the duplicator
+    try {
+      await grantPermission({
+        principalType: 'user',
+        principalId: userId,
+        resourceType: 'agent',
+        resourceId: newAgent._id,
+        accessRoleId: 'agent_owner',
+        grantedBy: userId,
+      });
+      logger.debug(
+        `[duplicateAgent] Granted owner permissions to user ${userId} for duplicated agent ${newAgent.id}`,
+      );
+    } catch (permissionError) {
+      logger.error(
+        `[duplicateAgent] Failed to grant owner permissions for duplicated agent ${newAgent.id}:`,
+        permissionError,
+      );
+    }
+
     return res.status(201).json({
       agent: newAgent,
       actions: newActionsList,
@@ -325,7 +370,7 @@ const deleteAgentHandler = async (req, res) => {
     if (!agent) {
       return res.status(404).json({ error: 'Agent not found' });
     }
-    await deleteAgent({ id, author: req.user.id });
+    await deleteAgent({ id });
     return res.json({ message: 'Agent deleted' });
   } catch (error) {
     logger.error('[/Agents/:id] Error deleting Agent', error);
@@ -334,7 +379,7 @@ const deleteAgentHandler = async (req, res) => {
 };

 /**
- *
+ * Lists agents using ACL-aware permissions (ownership + explicit shares).
  * @route GET /Agents
  * @param {object} req - Express Request
  * @param {object} req.query - Request query
@@ -343,9 +388,31 @@ const deleteAgentHandler = async (req, res) => {
  */
 const getListAgentsHandler = async (req, res) => {
   try {
-    const data = await getListAgents({
-      author: req.user.id,
-    });
+    const userId = req.user.id;
+
+    // Get agent IDs the user has VIEW access to via ACL
+    const accessibleIds = await findAccessibleResources({
+      userId,
+      resourceType: 'agent',
+      requiredPermissions: PermissionBits.VIEW,
+    });
+    const publiclyAccessibleIds = await findPubliclyAccessibleResources({
+      resourceType: 'agent',
+      requiredPermissions: PermissionBits.VIEW,
+    });
+    // Use the new ACL-aware function
+    const data = await getListAgentsByAccess({
+      accessibleIds,
+      otherParams: {}, // Can add query params here if needed
+    });
+    if (data?.data?.length) {
+      data.data = data.data.map((agent) => {
+        if (publiclyAccessibleIds.some((id) => id.equals(agent._id))) {
+          agent.isPublic = true;
+        }
+        return agent;
+      });
+    }
     return res.json(data);
   } catch (error) {
     logger.error('[/Agents] Error listing Agents', error);
@@ -373,12 +440,27 @@ const uploadAgentAvatarHandler = async (req, res) => {
     }

     const buffer = await fs.readFile(req.file.path);
-    const image = await uploadImageBuffer({
-      req,
-      context: FileContext.avatar,
-      metadata: { buffer },
+    const fileStrategy = req.app.locals.fileStrategy;
+
+    const resizedBuffer = await resizeAvatar({
+      userId: req.user.id,
+      input: buffer,
     });
+
+    const { processAvatar } = getStrategyFunctions(fileStrategy);
+    const avatarUrl = await processAvatar({
+      buffer: resizedBuffer,
+      userId: req.user.id,
+      manual: 'false',
+      agentId: agent_id,
+    });
+
+    const image = {
+      filepath: avatarUrl,
+      source: fileStrategy,
+    };
+
     let _avatar;
     try {
       const agent = await getAgent({ id: agent_id });
@@ -403,12 +485,12 @@ const uploadAgentAvatarHandler = async (req, res) => {
     const data = {
       avatar: {
         filepath: image.filepath,
-        source: req.app.locals.fileStrategy,
+        source: image.source,
       },
     };

     promises.push(
-      await updateAgent({ id: agent_id, author: req.user.id }, data, {
+      await updateAgent({ id: agent_id }, data, {
         updatingUserId: req.user.id,
       }),
     );
@@ -423,7 +505,7 @@ const uploadAgentAvatarHandler = async (req, res) => {
     try {
       await fs.unlink(req.file.path);
       logger.debug('[/:agent_id/avatar] Temp. image upload file deleted');
-    } catch (error) {
+    } catch {
       logger.debug('[/:agent_id/avatar] Temp. image upload file already deleted');
     }
   }
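One subtlety in the listing hunk above: `publiclyAccessibleIds.some((id) => id.equals(agent._id))` compares Mongoose ObjectIds with .equals() rather than ===, since ObjectIds are objects and strict equality checks reference identity, not the underlying value. A minimal illustration:

    const { Types } = require('mongoose');

    const a = new Types.ObjectId();
    const b = new Types.ObjectId(a.toHexString()); // same value, different instance
    console.log(a === b);     // false - reference comparison
    console.log(a.equals(b)); // true  - value comparison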
@@ -1,4 +1,5 @@
 const fs = require('fs').promises;
+const { logger } = require('@librechat/data-schemas');
 const { FileContext } = require('librechat-data-provider');
 const { uploadImageBuffer, filterFile } = require('~/server/services/Files/process');
 const validateAuthor = require('~/server/middleware/assistants/validateAuthor');
@@ -6,9 +7,9 @@ const { getStrategyFunctions } = require('~/server/services/Files/strategies');
 const { deleteAssistantActions } = require('~/server/services/ActionService');
 const { updateAssistantDoc, getAssistants } = require('~/models/Assistant');
 const { getOpenAIClient, fetchAssistants } = require('./helpers');
+const { getCachedTools } = require('~/server/services/Config');
 const { manifestToolMap } = require('~/app/clients/tools');
 const { deleteFileByFilter } = require('~/models/File');
-const { logger } = require('~/config');

 /**
  * Create an assistant.
@@ -30,21 +31,20 @@ const createAssistant = async (req, res) => {
   delete assistantData.conversation_starters;
   delete assistantData.append_current_datetime;

+  const toolDefinitions = await getCachedTools({ includeGlobal: true });
+
   assistantData.tools = tools
     .map((tool) => {
       if (typeof tool !== 'string') {
         return tool;
       }

-      const toolDefinitions = req.app.locals.availableTools;
       const toolDef = toolDefinitions[tool];
       if (!toolDef && manifestToolMap[tool] && manifestToolMap[tool].toolkit === true) {
-        return (
-          Object.entries(toolDefinitions)
-            .filter(([key]) => key.startsWith(`${tool}_`))
-            // eslint-disable-next-line no-unused-vars
-            .map(([_, val]) => val)
-        );
+        return Object.entries(toolDefinitions)
+          .filter(([key]) => key.startsWith(`${tool}_`))
+          .map(([_, val]) => val);
       }

       return toolDef;
@@ -135,21 +135,21 @@ const patchAssistant = async (req, res) => {
     append_current_datetime,
     ...updateData
   } = req.body;

+  const toolDefinitions = await getCachedTools({ includeGlobal: true });
+
   updateData.tools = (updateData.tools ?? [])
     .map((tool) => {
       if (typeof tool !== 'string') {
         return tool;
       }

-      const toolDefinitions = req.app.locals.availableTools;
       const toolDef = toolDefinitions[tool];
       if (!toolDef && manifestToolMap[tool] && manifestToolMap[tool].toolkit === true) {
-        return (
-          Object.entries(toolDefinitions)
-            .filter(([key]) => key.startsWith(`${tool}_`))
-            // eslint-disable-next-line no-unused-vars
-            .map(([_, val]) => val)
-        );
+        return Object.entries(toolDefinitions)
+          .filter(([key]) => key.startsWith(`${tool}_`))
+          .map(([_, val]) => val);
       }

       return toolDef;
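The createAssistant and patchAssistant hunks above expand a toolkit name into its member tool definitions by key prefix: toolkit members are stored under keys of the form `<toolkit>_<member>`. A self-contained sketch of that lookup (tool names invented for illustration):

    const toolDefinitions = {
      jira_createIssue: { name: 'jira_createIssue' },
      jira_addComment: { name: 'jira_addComment' },
      web_search: { name: 'web_search' },
    };
    const tool = 'jira'; // a toolkit name with no direct entry of its own
    const expanded = Object.entries(toolDefinitions)
      .filter(([key]) => key.startsWith(`${tool}_`))
      .map(([, val]) => val);
    console.log(expanded.map((t) => t.name)); // ['jira_createIssue', 'jira_addComment']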
@@ -1,10 +1,11 @@
+const { logger } = require('@librechat/data-schemas');
 const { ToolCallTypes } = require('librechat-data-provider');
 const validateAuthor = require('~/server/middleware/assistants/validateAuthor');
 const { validateAndUpdateTool } = require('~/server/services/ActionService');
+const { getCachedTools } = require('~/server/services/Config');
 const { updateAssistantDoc } = require('~/models/Assistant');
 const { manifestToolMap } = require('~/app/clients/tools');
 const { getOpenAIClient } = require('./helpers');
-const { logger } = require('~/config');

 /**
  * Create an assistant.
@@ -27,21 +28,20 @@ const createAssistant = async (req, res) => {
   delete assistantData.conversation_starters;
   delete assistantData.append_current_datetime;

+  const toolDefinitions = await getCachedTools({ includeGlobal: true });
+
   assistantData.tools = tools
     .map((tool) => {
       if (typeof tool !== 'string') {
         return tool;
       }

-      const toolDefinitions = req.app.locals.availableTools;
       const toolDef = toolDefinitions[tool];
       if (!toolDef && manifestToolMap[tool] && manifestToolMap[tool].toolkit === true) {
-        return (
-          Object.entries(toolDefinitions)
-            .filter(([key]) => key.startsWith(`${tool}_`))
-            // eslint-disable-next-line no-unused-vars
-            .map(([_, val]) => val)
-        );
+        return Object.entries(toolDefinitions)
+          .filter(([key]) => key.startsWith(`${tool}_`))
+          .map(([_, val]) => val);
       }

       return toolDef;
@@ -125,13 +125,13 @@ const updateAssistant = async ({ req, openai, assistant_id, updateData }) => {

   let hasFileSearch = false;
   for (const tool of updateData.tools ?? []) {
-    const toolDefinitions = req.app.locals.availableTools;
+    const toolDefinitions = await getCachedTools({ includeGlobal: true });
     let actualTool = typeof tool === 'string' ? toolDefinitions[tool] : tool;

     if (!actualTool && manifestToolMap[tool] && manifestToolMap[tool].toolkit === true) {
       actualTool = Object.entries(toolDefinitions)
         .filter(([key]) => key.startsWith(`${tool}_`))
-        // eslint-disable-next-line no-unused-vars
         .map(([_, val]) => val);
     } else if (!actualTool) {
       continue;
@@ -1,12 +1,12 @@
 const jwt = require('jsonwebtoken');
+const { logger } = require('@librechat/data-schemas');
 const {
   verifyTOTP,
-  verifyBackupCode,
   getTOTPSecret,
+  verifyBackupCode,
 } = require('~/server/services/twoFactorService');
 const { setAuthTokens } = require('~/server/services/AuthService');
-const { getUserById } = require('~/models/userMethods');
-const { logger } = require('~/config');
+const { getUserById } = require('~/models');

 /**
  * Verifies the 2FA code during login using a temporary token.
@@ -1,21 +1,22 @@
 require('dotenv').config();
+const fs = require('fs');
 const path = require('path');
 require('module-alias')({ base: path.resolve(__dirname, '..') });
 const cors = require('cors');
 const axios = require('axios');
 const express = require('express');
-const compression = require('compression');
 const passport = require('passport');
-const mongoSanitize = require('express-mongo-sanitize');
-const fs = require('fs');
+const compression = require('compression');
 const cookieParser = require('cookie-parser');
-const { jwtLogin, passportLogin } = require('~/strategies');
-const { connectDb, indexSync } = require('~/lib/db');
-const { isEnabled } = require('~/server/utils');
-const { ldapLogin } = require('~/strategies');
-const { logger } = require('~/config');
+const { isEnabled } = require('@librechat/api');
+const { logger } = require('@librechat/data-schemas');
+const mongoSanitize = require('express-mongo-sanitize');
+const { connectDb, indexSync } = require('~/db');
 const validateImageRequest = require('./middleware/validateImageRequest');
+const { jwtLogin, ldapLogin, passportLogin } = require('~/strategies');
 const errorController = require('./controllers/ErrorController');
+const initializeMCP = require('./services/initializeMCP');
 const configureSocialLogins = require('./socialLogins');
 const AppService = require('./services/AppService');
 const staticCache = require('./utils/staticCache');
@@ -36,6 +37,7 @@ const startServer = async () => {
     axios.defaults.headers.common['Accept-Encoding'] = 'gzip';
   }
   await connectDb();
+
   logger.info('Connected to MongoDB');
   await indexSync();

@@ -115,8 +117,11 @@ const startServer = async () => {
   app.use('/api/agents', routes.agents);
   app.use('/api/banner', routes.banner);
   app.use('/api/bedrock', routes.bedrock);
+  app.use('/api/memories', routes.memories);
+  app.use('/api/permissions', routes.accessPermissions);

   app.use('/api/tags', routes.tags);
+  app.use('/api/mcp', routes.mcp);

   app.use((req, res) => {
     res.set({
@@ -140,6 +145,8 @@ const startServer = async () => {
   } else {
     logger.info(`Server listening at http://${host == '0.0.0.0' ? 'localhost' : host}:${port}`);
   }
+
+  initializeMCP(app);
 });
 };

@@ -182,5 +189,5 @@ process.on('uncaughtException', (err) => {
   process.exit(1);
 });

-// export app for easier testing purposes
+/** Export app for easier testing purposes */
 module.exports = app;
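The new /api/memories, /api/permissions, and /api/mcp mounts sit before the final `app.use((req, res) => ...)` handler, which matters because Express dispatches middleware in registration order: anything registered after the catch-all would never be reached. A minimal sketch of that ordering constraint (placeholder handlers, not LibreChat code):

    const express = require('express');
    const app = express();

    // API mounts must come first...
    app.use('/api/ping', (req, res) => res.json({ ok: true }));

    // ...because this catch-all matches every request that reaches it.
    app.use((req, res) => {
      res.status(200).send('fallback response');
    });

    app.listen(3080);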
@@ -327,7 +327,7 @@ const handleAbortError = async (res, req, error, data) => {
     errorText = `{"type":"${ErrorTypes.INVALID_REQUEST}"}`;
   }

-  if (error?.message?.includes('does not support \'system\'')) {
+  if (error?.message?.includes("does not support 'system'")) {
     errorText = `{"type":"${ErrorTypes.NO_SYSTEM_MESSAGES}"}`;
   }

api/server/middleware/accessResources/canAccessAgentFromBody.js (new file, 97 lines):

@@ -0,0 +1,97 @@
+const { logger } = require('@librechat/data-schemas');
+const { Constants, isAgentsEndpoint } = require('librechat-data-provider');
+const { canAccessResource } = require('./canAccessResource');
+const { getAgent } = require('~/models/Agent');
+
+/**
+ * Agent ID resolver function for agent_id from request body
+ * Resolves custom agent ID (e.g., "agent_abc123") to MongoDB ObjectId
+ * This is used specifically for chat routes where agent_id comes from request body
+ *
+ * @param {string} agentCustomId - Custom agent ID from request body
+ * @returns {Promise<Object|null>} Agent document with _id field, or null if not found
+ */
+const resolveAgentIdFromBody = async (agentCustomId) => {
+  // Handle ephemeral agents - they don't need permission checks
+  if (agentCustomId === Constants.EPHEMERAL_AGENT_ID) {
+    return null; // No permission check needed for ephemeral agents
+  }
+
+  return await getAgent({ id: agentCustomId });
+};
+
+/**
+ * Middleware factory that creates middleware to check agent access permissions from request body.
+ * This middleware is specifically designed for chat routes where the agent_id comes from req.body
+ * instead of route parameters.
+ *
+ * @param {Object} options - Configuration options
+ * @param {number} options.requiredPermission - The permission bit required (1=view, 2=edit, 4=delete, 8=share)
+ * @returns {Function} Express middleware function
+ *
+ * @example
+ * // Basic usage for agent chat (requires VIEW permission)
+ * router.post('/chat',
+ *   canAccessAgentFromBody({ requiredPermission: PermissionBits.VIEW }),
+ *   buildEndpointOption,
+ *   chatController
+ * );
+ */
+const canAccessAgentFromBody = (options) => {
+  const { requiredPermission } = options;
+
+  // Validate required options
+  if (!requiredPermission || typeof requiredPermission !== 'number') {
+    throw new Error('canAccessAgentFromBody: requiredPermission is required and must be a number');
+  }
+
+  return async (req, res, next) => {
+    try {
+      const { endpoint, agent_id } = req.body;
+      let agentId = agent_id;
+
+      if (!isAgentsEndpoint(endpoint)) {
+        agentId = Constants.EPHEMERAL_AGENT_ID;
+      }
+
+      if (!agentId) {
+        return res.status(400).json({
+          error: 'Bad Request',
+          message: 'agent_id is required in request body',
+        });
+      }
+
+      // Skip permission checks for ephemeral agents
+      if (agentId === Constants.EPHEMERAL_AGENT_ID) {
+        return next();
+      }
+
+      const agentAccessMiddleware = canAccessResource({
+        resourceType: 'agent',
+        requiredPermission,
+        resourceIdParam: 'agent_id', // This will be ignored since we use custom resolver
+        idResolver: () => resolveAgentIdFromBody(agentId),
+      });
+
+      const tempReq = {
+        ...req,
+        params: {
+          ...req.params,
+          agent_id: agentId,
+        },
+      };
+
+      return agentAccessMiddleware(tempReq, res, next);
+    } catch (error) {
+      logger.error('Failed to validate agent access permissions', error);
+      return res.status(500).json({
+        error: 'Internal Server Error',
+        message: 'Failed to validate agent access permissions',
+      });
+    }
+  };
+};
+
+module.exports = {
+  canAccessAgentFromBody,
+};
api/server/middleware/accessResources/canAccessAgentResource.js (new file, 58 lines):

@@ -0,0 +1,58 @@
+const { getAgent } = require('~/models/Agent');
+const { canAccessResource } = require('./canAccessResource');
+
+/**
+ * Agent ID resolver function
+ * Resolves custom agent ID (e.g., "agent_abc123") to MongoDB ObjectId
+ *
+ * @param {string} agentCustomId - Custom agent ID from route parameter
+ * @returns {Promise<Object|null>} Agent document with _id field, or null if not found
+ */
+const resolveAgentId = async (agentCustomId) => {
+  return await getAgent({ id: agentCustomId });
+};
+
+/**
+ * Agent-specific middleware factory that creates middleware to check agent access permissions.
+ * This middleware extends the generic canAccessResource to handle agent custom ID resolution.
+ *
+ * @param {Object} options - Configuration options
+ * @param {number} options.requiredPermission - The permission bit required (1=view, 2=edit, 4=delete, 8=share)
+ * @param {string} [options.resourceIdParam='id'] - The name of the route parameter containing the agent custom ID
+ * @returns {Function} Express middleware function
+ *
+ * @example
+ * // Basic usage for viewing agents
+ * router.get('/agents/:id',
+ *   canAccessAgentResource({ requiredPermission: 1 }),
+ *   getAgent
+ * );
+ *
+ * @example
+ * // Custom resource ID parameter and edit permission
+ * router.patch('/agents/:agent_id',
+ *   canAccessAgentResource({
+ *     requiredPermission: 2,
+ *     resourceIdParam: 'agent_id'
+ *   }),
+ *   updateAgent
+ * );
+ */
+const canAccessAgentResource = (options) => {
+  const { requiredPermission, resourceIdParam = 'id' } = options;
+
+  if (!requiredPermission || typeof requiredPermission !== 'number') {
+    throw new Error('canAccessAgentResource: requiredPermission is required and must be a number');
+  }
+
+  return canAccessResource({
+    resourceType: 'agent',
+    requiredPermission,
+    resourceIdParam,
+    idResolver: resolveAgentId,
+  });
+};
+
+module.exports = {
+  canAccessAgentResource,
+};
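Both agent middleware factories take requiredPermission as a bit value; per the JSDoc here and in canAccessResource below, the bits are 1=view, 2=edit, 4=delete, 8=share, and the tests below grant owners permBits 15 (1+2+4+8). Assuming checkPermission performs a standard bitmask test (its internals are not shown in this diff), the check reduces to:

    // Assumed semantics: a grant satisfies a request when every required bit is set.
    const PermissionBits = { VIEW: 1, EDIT: 2, DELETE: 4, SHARE: 8 };
    const hasBits = (permBits, required) => (permBits & required) === required;

    console.log(hasBits(15, PermissionBits.EDIT)); // true  - owner grant covers EDIT
    console.log(hasBits(PermissionBits.VIEW, PermissionBits.EDIT)); // false - VIEW-only grant
    console.log(hasBits(15, PermissionBits.VIEW | PermissionBits.EDIT)); // true - combined bits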
New test file (384 lines) covering canAccessAgentResource:

@@ -0,0 +1,384 @@
+const mongoose = require('mongoose');
+const { MongoMemoryServer } = require('mongodb-memory-server');
+const { canAccessAgentResource } = require('./canAccessAgentResource');
+const { User, Role, AclEntry } = require('~/db/models');
+const { createAgent } = require('~/models/Agent');
+
+describe('canAccessAgentResource middleware', () => {
+  let mongoServer;
+  let req, res, next;
+  let testUser;
+
+  beforeAll(async () => {
+    mongoServer = await MongoMemoryServer.create();
+    const mongoUri = mongoServer.getUri();
+    await mongoose.connect(mongoUri);
+  });
+
+  afterAll(async () => {
+    await mongoose.disconnect();
+    await mongoServer.stop();
+  });
+
+  beforeEach(async () => {
+    await mongoose.connection.dropDatabase();
+    await Role.create({
+      name: 'test-role',
+      permissions: {
+        AGENTS: {
+          USE: true,
+          CREATE: true,
+          SHARED_GLOBAL: false,
+        },
+      },
+    });
+
+    // Create a test user
+    testUser = await User.create({
+      email: 'test@example.com',
+      name: 'Test User',
+      username: 'testuser',
+      role: 'test-role',
+    });
+
+    req = {
+      user: { id: testUser._id.toString(), role: 'test-role' },
+      params: {},
+    };
+    res = {
+      status: jest.fn().mockReturnThis(),
+      json: jest.fn(),
+    };
+    next = jest.fn();
+
+    jest.clearAllMocks();
+  });
+
+  describe('middleware factory', () => {
+    test('should throw error if requiredPermission is not provided', () => {
+      expect(() => canAccessAgentResource({})).toThrow(
+        'canAccessAgentResource: requiredPermission is required and must be a number',
+      );
+    });
+
+    test('should throw error if requiredPermission is not a number', () => {
+      expect(() => canAccessAgentResource({ requiredPermission: '1' })).toThrow(
+        'canAccessAgentResource: requiredPermission is required and must be a number',
+      );
+    });
+
+    test('should create middleware with default resourceIdParam', () => {
+      const middleware = canAccessAgentResource({ requiredPermission: 1 });
+      expect(typeof middleware).toBe('function');
+      expect(middleware.length).toBe(3); // Express middleware signature
+    });
+
+    test('should create middleware with custom resourceIdParam', () => {
+      const middleware = canAccessAgentResource({
+        requiredPermission: 2,
+        resourceIdParam: 'agent_id',
+      });
+      expect(typeof middleware).toBe('function');
+      expect(middleware.length).toBe(3);
+    });
+  });
+
+  describe('permission checking with real agents', () => {
+    test('should allow access when user is the agent author', async () => {
+      // Create an agent owned by the test user
+      const agent = await createAgent({
+        id: `agent_${Date.now()}`,
+        name: 'Test Agent',
+        provider: 'openai',
+        model: 'gpt-4',
+        author: testUser._id,
+      });
+
+      // Create ACL entry for the author (owner permissions)
+      await AclEntry.create({
+        principalType: 'user',
+        principalId: testUser._id,
+        principalModel: 'User',
+        resourceType: 'agent',
+        resourceId: agent._id,
+        permBits: 15, // All permissions (1+2+4+8)
+        grantedBy: testUser._id,
+      });
+
+      req.params.id = agent.id;
+
+      const middleware = canAccessAgentResource({ requiredPermission: 1 }); // VIEW permission
+      await middleware(req, res, next);
+
+      expect(next).toHaveBeenCalled();
+      expect(res.status).not.toHaveBeenCalled();
+    });
+
+    test('should deny access when user is not the author and has no ACL entry', async () => {
+      // Create an agent owned by a different user
+      const otherUser = await User.create({
+        email: 'other@example.com',
+        name: 'Other User',
+        username: 'otheruser',
+        role: 'test-role',
+      });
+
+      const agent = await createAgent({
+        id: `agent_${Date.now()}`,
+        name: 'Other User Agent',
+        provider: 'openai',
+        model: 'gpt-4',
+        author: otherUser._id,
+      });
+
+      // Create ACL entry for the other user (owner)
+      await AclEntry.create({
+        principalType: 'user',
+        principalId: otherUser._id,
+        principalModel: 'User',
+        resourceType: 'agent',
+        resourceId: agent._id,
+        permBits: 15, // All permissions
+        grantedBy: otherUser._id,
+      });
+
+      req.params.id = agent.id;
+
+      const middleware = canAccessAgentResource({ requiredPermission: 1 }); // VIEW permission
+      await middleware(req, res, next);
+
+      expect(next).not.toHaveBeenCalled();
+      expect(res.status).toHaveBeenCalledWith(403);
+      expect(res.json).toHaveBeenCalledWith({
+        error: 'Forbidden',
+        message: 'Insufficient permissions to access this agent',
+      });
+    });
+
+    test('should allow access when user has ACL entry with sufficient permissions', async () => {
+      // Create an agent owned by a different user
+      const otherUser = await User.create({
+        email: 'other2@example.com',
+        name: 'Other User 2',
+        username: 'otheruser2',
+        role: 'test-role',
+      });
+
+      const agent = await createAgent({
+        id: `agent_${Date.now()}`,
+        name: 'Shared Agent',
+        provider: 'openai',
+        model: 'gpt-4',
+        author: otherUser._id,
+      });
+
+      // Create ACL entry granting view permission to test user
+      await AclEntry.create({
+        principalType: 'user',
+        principalId: testUser._id,
+        principalModel: 'User',
+        resourceType: 'agent',
+        resourceId: agent._id,
+        permBits: 1, // VIEW permission
+        grantedBy: otherUser._id,
+      });
+
+      req.params.id = agent.id;
+
+      const middleware = canAccessAgentResource({ requiredPermission: 1 }); // VIEW permission
+      await middleware(req, res, next);
+
+      expect(next).toHaveBeenCalled();
+      expect(res.status).not.toHaveBeenCalled();
+    });
+
+    test('should deny access when ACL permissions are insufficient', async () => {
+      // Create an agent owned by a different user
+      const otherUser = await User.create({
+        email: 'other3@example.com',
+        name: 'Other User 3',
+        username: 'otheruser3',
+        role: 'test-role',
+      });
+
+      const agent = await createAgent({
+        id: `agent_${Date.now()}`,
+        name: 'Limited Access Agent',
+        provider: 'openai',
+        model: 'gpt-4',
+        author: otherUser._id,
+      });
+
+      // Create ACL entry granting only view permission
+      await AclEntry.create({
+        principalType: 'user',
+        principalId: testUser._id,
+        principalModel: 'User',
+        resourceType: 'agent',
+        resourceId: agent._id,
+        permBits: 1, // VIEW permission only
+        grantedBy: otherUser._id,
+      });
+
+      req.params.id = agent.id;
+
+      const middleware = canAccessAgentResource({ requiredPermission: 2 }); // EDIT permission required
+      await middleware(req, res, next);
+
+      expect(next).not.toHaveBeenCalled();
+      expect(res.status).toHaveBeenCalledWith(403);
+      expect(res.json).toHaveBeenCalledWith({
+        error: 'Forbidden',
+        message: 'Insufficient permissions to access this agent',
+      });
+    });
+
+    test('should handle non-existent agent', async () => {
+      req.params.id = 'agent_nonexistent';
+
+      const middleware = canAccessAgentResource({ requiredPermission: 1 });
+      await middleware(req, res, next);
+
+      expect(next).not.toHaveBeenCalled();
+      expect(res.status).toHaveBeenCalledWith(404);
+      expect(res.json).toHaveBeenCalledWith({
+        error: 'Not Found',
+        message: 'agent not found',
+      });
+    });
+
+    test('should use custom resourceIdParam', async () => {
+      const agent = await createAgent({
+        id: `agent_${Date.now()}`,
+        name: 'Custom Param Agent',
+        provider: 'openai',
+        model: 'gpt-4',
+        author: testUser._id,
+      });
+
+      // Create ACL entry for the author
+      await AclEntry.create({
+        principalType: 'user',
+        principalId: testUser._id,
+        principalModel: 'User',
+        resourceType: 'agent',
+        resourceId: agent._id,
+        permBits: 15, // All permissions
+        grantedBy: testUser._id,
+      });
+
+      req.params.agent_id = agent.id; // Using custom param name
+
+      const middleware = canAccessAgentResource({
+        requiredPermission: 1,
+        resourceIdParam: 'agent_id',
+      });
+      await middleware(req, res, next);
+
+      expect(next).toHaveBeenCalled();
+      expect(res.status).not.toHaveBeenCalled();
+    });
+  });
+
+  describe('permission levels', () => {
+    let agent;
+
+    beforeEach(async () => {
+      agent = await createAgent({
+        id: `agent_${Date.now()}`,
+        name: 'Permission Test Agent',
+        provider: 'openai',
+        model: 'gpt-4',
+        author: testUser._id,
+      });
+
+      // Create ACL entry with all permissions for the owner
+      await AclEntry.create({
+        principalType: 'user',
+        principalId: testUser._id,
+        principalModel: 'User',
+        resourceType: 'agent',
+        resourceId: agent._id,
+        permBits: 15, // All permissions (1+2+4+8)
+        grantedBy: testUser._id,
+      });
+
+      req.params.id = agent.id;
+    });
+
+    test('should support view permission (1)', async () => {
+      const middleware = canAccessAgentResource({ requiredPermission: 1 });
+      await middleware(req, res, next);
+      expect(next).toHaveBeenCalled();
+    });
+
+    test('should support edit permission (2)', async () => {
+      const middleware = canAccessAgentResource({ requiredPermission: 2 });
+      await middleware(req, res, next);
+      expect(next).toHaveBeenCalled();
+    });
+
+    test('should support delete permission (4)', async () => {
+      const middleware = canAccessAgentResource({ requiredPermission: 4 });
+      await middleware(req, res, next);
+      expect(next).toHaveBeenCalled();
+    });
+
+    test('should support share permission (8)', async () => {
+      const middleware = canAccessAgentResource({ requiredPermission: 8 });
+      await middleware(req, res, next);
+      expect(next).toHaveBeenCalled();
+    });
+
+    test('should support combined permissions', async () => {
+      const viewAndEdit = 1 | 2; // 3
+      const middleware = canAccessAgentResource({ requiredPermission: viewAndEdit });
+      await middleware(req, res, next);
+      expect(next).toHaveBeenCalled();
+    });
+  });
+
+  describe('integration with agent operations', () => {
+    test('should work with agent CRUD operations', async () => {
+      const agentId = `agent_${Date.now()}`;
+
+      // Create agent
+      const agent = await createAgent({
+        id: agentId,
+        name: 'Integration Test Agent',
+        provider: 'openai',
+        model: 'gpt-4',
+        author: testUser._id,
+        description: 'Testing integration',
+      });
+
+      // Create ACL entry for the author
+      await AclEntry.create({
+        principalType: 'user',
+        principalId: testUser._id,
+        principalModel: 'User',
+        resourceType: 'agent',
+        resourceId: agent._id,
+        permBits: 15, // All permissions
+        grantedBy: testUser._id,
+      });
+
+      req.params.id = agentId;
+
+      // Test view access
+      const viewMiddleware = canAccessAgentResource({ requiredPermission: 1 });
+      await viewMiddleware(req, res, next);
+      expect(next).toHaveBeenCalled();
+      jest.clearAllMocks();
+
+      // Update the agent
+      const { updateAgent } = require('~/models/Agent');
+      await updateAgent({ id: agentId }, { description: 'Updated description' });
+
+      // Test edit access
+      const editMiddleware = canAccessAgentResource({ requiredPermission: 2 });
+      await editMiddleware(req, res, next);
+      expect(next).toHaveBeenCalled();
+    });
+  });
+});
api/server/middleware/accessResources/canAccessResource.js (new file, 157 lines):
@@ -0,0 +1,157 @@
+const { logger } = require('@librechat/data-schemas');
+const { SystemRoles } = require('librechat-data-provider');
+const { checkPermission } = require('~/server/services/PermissionService');
+
+/**
+ * Generic base middleware factory that creates middleware to check resource access permissions.
+ * This middleware expects MongoDB ObjectIds as resource identifiers for ACL permission checks.
+ *
+ * @param {Object} options - Configuration options
+ * @param {string} options.resourceType - The type of resource (e.g., 'agent', 'file', 'project')
+ * @param {number} options.requiredPermission - The permission bit required (1=view, 2=edit, 4=delete, 8=share)
+ * @param {string} [options.resourceIdParam='resourceId'] - The name of the route parameter containing the resource ID
+ * @param {Function} [options.idResolver] - Optional function to resolve custom IDs to ObjectIds
+ * @returns {Function} Express middleware function
+ *
+ * @example
+ * // Direct usage with ObjectId (for resources that use MongoDB ObjectId in routes)
+ * router.get('/prompts/:promptId',
+ *   canAccessResource({ resourceType: 'prompt', requiredPermission: 1 }),
+ *   getPrompt
+ * );
+ *
+ * @example
+ * // Usage with custom ID resolver (for resources that use custom string IDs)
+ * router.get('/agents/:id',
+ *   canAccessResource({
+ *     resourceType: 'agent',
+ *     requiredPermission: 1,
+ *     resourceIdParam: 'id',
+ *     idResolver: (customId) => resolveAgentId(customId)
+ *   }),
+ *   getAgent
+ * );
+ */
+const canAccessResource = (options) => {
+  const {
+    resourceType,
+    requiredPermission,
+    resourceIdParam = 'resourceId',
+    idResolver = null,
+  } = options;
+
+  if (!resourceType || typeof resourceType !== 'string') {
+    throw new Error('canAccessResource: resourceType is required and must be a string');
+  }
+
+  if (!requiredPermission || typeof requiredPermission !== 'number') {
+    throw new Error('canAccessResource: requiredPermission is required and must be a number');
+  }
+
+  return async (req, res, next) => {
+    try {
+      // Extract resource ID from route parameters
+      const rawResourceId = req.params[resourceIdParam];
+
+      if (!rawResourceId) {
+        logger.warn(`[canAccessResource] Missing ${resourceIdParam} in route parameters`);
+        return res.status(400).json({
+          error: 'Bad Request',
+          message: `${resourceIdParam} is required`,
+        });
+      }
+
+      // Check if user is authenticated
+      if (!req.user || !req.user.id) {
+        logger.warn(
+          `[canAccessResource] Unauthenticated request for ${resourceType} ${rawResourceId}`,
+        );
+        return res.status(401).json({
+          error: 'Unauthorized',
+          message: 'Authentication required',
+        });
+      }
+      // if system admin let through
+      if (req.user.role === SystemRoles.ADMIN) {
+        return next();
+      }
+      const userId = req.user.id;
+      let resourceId = rawResourceId;
+      let resourceInfo = null;
+
+      // Resolve custom ID to ObjectId if resolver is provided
+      if (idResolver) {
+        logger.debug(
+          `[canAccessResource] Resolving ${resourceType} custom ID ${rawResourceId} to ObjectId`,
+        );
+
+        const resolutionResult = await idResolver(rawResourceId);
+
+        if (!resolutionResult) {
+          logger.warn(`[canAccessResource] ${resourceType} not found: ${rawResourceId}`);
+          return res.status(404).json({
+            error: 'Not Found',
+            message: `${resourceType} not found`,
+          });
+        }
+
+        // Handle different resolver return formats
+        if (typeof resolutionResult === 'string' || resolutionResult._id) {
+          resourceId = resolutionResult._id || resolutionResult;
+          resourceInfo = typeof resolutionResult === 'object' ? resolutionResult : null;
+        } else {
+          resourceId = resolutionResult;
+        }
+
+        logger.debug(
+          `[canAccessResource] Resolved ${resourceType} ${rawResourceId} to ObjectId ${resourceId}`,
+        );
+      }
+
+      // Check permissions using PermissionService with ObjectId
+      const hasPermission = await checkPermission({
+        userId,
+        resourceType,
+        resourceId,
+        requiredPermission,
+      });
+
+      if (hasPermission) {
+        logger.debug(
+          `[canAccessResource] User ${userId} has permission ${requiredPermission} on ${resourceType} ${rawResourceId} (${resourceId})`,
+        );
+
+        req.resourceAccess = {
+          resourceType,
+          resourceId, // MongoDB ObjectId for ACL operations
+          customResourceId: rawResourceId, // Original ID from route params
+          permission: requiredPermission,
+          userId,
+          ...(resourceInfo && { resourceInfo }),
+        };
+
+        return next();
+      }
+
+      logger.warn(
+        `[canAccessResource] User ${userId} denied access to ${resourceType} ${rawResourceId} ` +
+          `(required permission: ${requiredPermission})`,
+      );
+
+      return res.status(403).json({
+        error: 'Forbidden',
+        message: `Insufficient permissions to access this ${resourceType}`,
+      });
+    } catch (error) {
+      logger.error(`[canAccessResource] Error checking access for ${resourceType}:`, error);
+      return res.status(500).json({
+        error: 'Internal Server Error',
+        message: 'Failed to check resource access permissions',
+      });
+    }
+  };
+};
+
+module.exports = {
+  canAccessResource,
+};
api/server/middleware/accessResources/index.js (new file, 9 lines):

@@ -0,0 +1,9 @@
+const { canAccessResource } = require('./canAccessResource');
+const { canAccessAgentResource } = require('./canAccessAgentResource');
+const { canAccessAgentFromBody } = require('./canAccessAgentFromBody');
+
+module.exports = {
+  canAccessResource,
+  canAccessAgentResource,
+  canAccessAgentFromBody,
+};
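With the barrel file in place, route modules can import all three middlewares from one path. A hypothetical wiring sketch for the agent routes (router shape and handler stubs assumed; only the middleware API comes from this diff):

    const express = require('express');
    const { canAccessAgentResource } = require('~/server/middleware/accessResources');

    // Stub handlers for illustration only.
    const getAgentHandler = (req, res) => res.json(req.resourceAccess);
    const updateAgentHandler = (req, res) => res.json({ updated: true });
    const deleteAgentHandler = (req, res) => res.json({ deleted: true });

    const router = express.Router();
    // Permission bits per the middleware JSDoc: 1=view, 2=edit, 4=delete.
    router.get('/:id', canAccessAgentResource({ requiredPermission: 1 }), getAgentHandler);
    router.patch('/:id', canAccessAgentResource({ requiredPermission: 2 }), updateAgentHandler);
    router.delete('/:id', canAccessAgentResource({ requiredPermission: 4 }), deleteAgentHandler);

    module.exports = router;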
Some files were not shown because too many files have changed in this diff.