Compare commits: feat/group...refactor/o

134 Commits
Commit SHA1s:

1e9fb86ba8  6137a089b3  a928115a84  1b14721e75  f4301e3fb0
fc71c6b358  3acb000090  f6924567ce  1fa055b4ec  0e18faf44b
74474815aa  5bec40e574  71a3d97058  a21891f692  cbc38b8263
83e2766187  8d6110342f  472c2f14e4  8d51f450e8  5a2eb74c2d
fc8e8d2a3b  d653e96209  6bb6d2044b  76d75030b9  abc32e66ce
2ee68f9e4a  e49b49af6c  052e61b735  1ccac58403  04d74a7e07
0fdca8ddbd  c5ca621efd  8cefa566da  7e4c8a5d0d  edf33bedcb
21e00168b1  da3730b7d6  770c766d50  5eb6926464  e478ae1c28
0c9284c8ae  4eeadddfe6  9ca1847535  5d0bc95193  e7d6100fe4
01a95229f2  0939250f07  7147bce3c3  486fe34a2b  922f43f520
e6fa01d514  8238fb49e0  430557676d  8a5047c456  c787515894
d95d8032cc  b9f72f4869  429bb6653a  47caafa8f8  8530594f37
0b071c06f6  1092392ed8  36c8947029  4175a3ea19  02dc71f4b7
a6c99a3267  fcefc6eedf  dfdafdbd09  33834cd484  7ef2c626e2
bc43423f58  863401bcdf  33c8b87edd  077248a8a7  c6fb4686ef
f1c6e4d55e  e192c99c7d  056172f007  5eed5009e9  6fc9abd4ad
03a924eaca  25c993d93e  09659c1040  19a8f5c545  1050346915
8a1a38f346  32081245da  6fd3b569ac  6671fcb714  c4677ab3fb
ef9d9b1276  a4ca4b7d9d  8e6eef04ab  ec3cbca6e3  4639dc3255
0ef3fefaec  37aba18a96  2ce6ac74f4  9fddb0ff6a  32f7dbd11f
79197454f8  97e1cdd224  d6a65f5a08  f4facb7d35  545a909953
cd436dc6a8  e75beb92b3  5251246313  26f23c6aaf  1636af1f27
b050a0bf1e  deb928bf80  21005b66cc  3dc9e85fab  ec67cf2d3a
1fe977e48f  01470ef9fd  bef5c26bed  9e03fef9db  283c9cff6f
0aafdc0a86  365e3bca95  a01536ddb7  8a3ff62ee6  74d8a3824c
62c3f135e7  baf3b4ad08  e5d08ccdf1  5178507b1c  f797e90d79
259224d986  13789ab261  faaba30af1  14660d75ae
**.env.example** (40 changed lines)

@@ -15,6 +15,20 @@ HOST=localhost
 PORT=3080
 
 MONGO_URI=mongodb://127.0.0.1:27017/LibreChat
+#The maximum number of connections in the connection pool. */
+MONGO_MAX_POOL_SIZE=
+#The minimum number of connections in the connection pool. */
+MONGO_MIN_POOL_SIZE=
+#The maximum number of connections that may be in the process of being established concurrently by the connection pool. */
+MONGO_MAX_CONNECTING=
+#The maximum number of milliseconds that a connection can remain idle in the pool before being removed and closed. */
+MONGO_MAX_IDLE_TIME_MS=
+#The maximum time in milliseconds that a thread can wait for a connection to become available. */
+MONGO_WAIT_QUEUE_TIMEOUT_MS=
+# Set to false to disable automatic index creation for all models associated with this connection. */
+MONGO_AUTO_INDEX=
+# Set to `false` to disable Mongoose automatically calling `createCollection()` on every model created on this connection. */
+MONGO_AUTO_CREATE=
 
 DOMAIN_CLIENT=http://localhost:3080
 DOMAIN_SERVER=http://localhost:3080
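These MONGO_* settings map one-to-one onto MongoDB driver connection-pool options. A minimal sketch of how they would feed a Mongoose connection (the parsing helper and wiring below are illustrative assumptions, not code from this diff):

```js
// Sketch: new MONGO_* env vars as Mongoose/MongoDB driver options.
// Empty or unset vars fall through to the driver defaults.
const mongoose = require('mongoose');

const toInt = (value) => (value ? parseInt(value, 10) : undefined);

mongoose.connect(process.env.MONGO_URI, {
  maxPoolSize: toInt(process.env.MONGO_MAX_POOL_SIZE),
  minPoolSize: toInt(process.env.MONGO_MIN_POOL_SIZE),
  maxConnecting: toInt(process.env.MONGO_MAX_CONNECTING),
  maxIdleTimeMS: toInt(process.env.MONGO_MAX_IDLE_TIME_MS),
  waitQueueTimeoutMS: toInt(process.env.MONGO_WAIT_QUEUE_TIMEOUT_MS),
  autoIndex: process.env.MONGO_AUTO_INDEX !== 'false',
  autoCreate: process.env.MONGO_AUTO_CREATE !== 'false',
});
```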
@@ -442,6 +456,8 @@ OPENID_REQUIRED_ROLE_PARAMETER_PATH=
 OPENID_USERNAME_CLAIM=
 # Set to determine which user info property returned from OpenID Provider to store as the User's name
 OPENID_NAME_CLAIM=
+# Optional audience parameter for OpenID authorization requests
+OPENID_AUDIENCE=
 
 OPENID_BUTTON_LABEL=
 OPENID_IMAGE_URL=
@@ -463,6 +479,21 @@ OPENID_ON_BEHALF_FLOW_USERINFO_SCOPE="user.read" # example for Scope Needed for
 # Set to true to use the OpenID Connect end session endpoint for logout
 OPENID_USE_END_SESSION_ENDPOINT=
 
+#========================#
+# SharePoint Integration #
+#========================#
+# Requires Entra ID (OpenID) authentication to be configured
+
+# Enable SharePoint file picker in chat and agent panels
+# ENABLE_SHAREPOINT_FILEPICKER=true
+
+# SharePoint tenant base URL (e.g., https://yourtenant.sharepoint.com)
+# SHAREPOINT_BASE_URL=https://yourtenant.sharepoint.com
+
+# Microsoft Graph API And SharePoint scopes for file picker
+# SHAREPOINT_PICKER_SHAREPOINT_SCOPE==https://yourtenant.sharepoint.com/AllSites.Read
+# SHAREPOINT_PICKER_GRAPH_SCOPE=Files.Read.All
+#========================#
 
 # SAML
 # Note: If OpenID is enabled, SAML authentication will be automatically disabled.
@@ -642,6 +673,15 @@ HELP_AND_FAQ_URL=https://librechat.ai
 # Redis connection limits
 # REDIS_MAX_LISTENERS=40
 
+# Redis ping interval in seconds (0 = disabled, >0 = enabled)
+# When set to a positive integer, Redis clients will ping the server at this interval to keep connections alive
+# When unset or 0, no pinging is performed (recommended for most use cases)
+# REDIS_PING_INTERVAL=300
+
+# Force specific cache namespaces to use in-memory storage even when Redis is enabled
+# Comma-separated list of CacheKeys (e.g., STATIC_CONFIG,ROLES,MESSAGES)
+# FORCED_IN_MEMORY_CACHE_NAMESPACES=STATIC_CONFIG,ROLES
+
 #==================================================#
 # Others #
 #==================================================#
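What a keepalive like REDIS_PING_INTERVAL implies, sketched standalone (the actual client wiring lives in api/cache and is not part of this hunk; redisClient below is a placeholder):

```js
// Sketch: periodic PING to keep a Redis connection alive.
// A value of 0 (the default) means no timer is created at all.
const intervalSec = Number(process.env.REDIS_PING_INTERVAL ?? 0);
if (intervalSec > 0) {
  const timer = setInterval(() => {
    redisClient.ping().catch((err) => console.error('Redis ping failed:', err));
  }, intervalSec * 1000);
  timer.unref(); // don't let the keepalive timer block process shutdown
}
```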
**.github/workflows/backend-review.yml** (2 changed lines)

@@ -7,7 +7,7 @@ on:
       - release/*
     paths:
       - 'api/**'
-      - 'packages/api/**'
+      - 'packages/**'
 jobs:
   tests_Backend:
     name: Run Backend unit tests
**.github/workflows/client.yml** (46 changed lines)

@@ -1,6 +1,11 @@
 name: Publish `@librechat/client` to NPM
 
 on:
+  push:
+    branches:
+      - main
+    paths:
+      - 'packages/client/package.json'
   workflow_dispatch:
     inputs:
       reason:

@@ -17,16 +22,37 @@ jobs:
       - name: Use Node.js
         uses: actions/setup-node@v4
         with:
-          node-version: '18.x'
+          node-version: '20.x'
 
-      - name: Check if client package exists
+      - name: Install client dependencies
+        run: cd packages/client && npm ci
+
+      - name: Build client
+        run: cd packages/client && npm run build
+
+      - name: Set up npm authentication
+        run: echo "//registry.npmjs.org/:_authToken=${{ secrets.PUBLISH_NPM_TOKEN }}" > ~/.npmrc
+
+      - name: Check version change
+        id: check
+        working-directory: packages/client
         run: |
-          if [ -d "packages/client" ]; then
-            echo "Client package directory found"
+          PACKAGE_VERSION=$(node -p "require('./package.json').version")
+          PUBLISHED_VERSION=$(npm view @librechat/client version 2>/dev/null || echo "0.0.0")
+          if [ "$PACKAGE_VERSION" = "$PUBLISHED_VERSION" ]; then
+            echo "No version change, skipping publish"
+            echo "skip=true" >> $GITHUB_OUTPUT
           else
-            echo "Client package directory not found - workflow ready for future use"
-            exit 0
+            echo "Version changed, proceeding with publish"
+            echo "skip=false" >> $GITHUB_OUTPUT
           fi
-
-      - name: Placeholder for future publishing
-        run: echo "Client package publishing workflow is ready"
+
+      - name: Pack package
+        if: steps.check.outputs.skip != 'true'
+        working-directory: packages/client
+        run: npm pack
+
+      - name: Publish
+        if: steps.check.outputs.skip != 'true'
+        working-directory: packages/client
+        run: npm publish *.tgz --access public
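The publish gate above reduces to "publish only when package.json is ahead of the npm registry". The same check as a standalone Node sketch (hypothetical script, not part of the workflow):

```js
// Sketch: decide whether to skip publishing @librechat/client.
const { execSync } = require('node:child_process');

const local = require('./package.json').version;
let published = '0.0.0';
try {
  published = execSync('npm view @librechat/client version').toString().trim();
} catch {
  // not yet published; keep the "0.0.0" sentinel, as the workflow does
}
console.log(local === published ? 'skip=true' : 'skip=false');
```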
**.github/workflows/data-schemas.yml** (2 changed lines)

@@ -22,7 +22,7 @@ jobs:
       - name: Use Node.js
         uses: actions/setup-node@v4
         with:
-          node-version: '18.x'
+          node-version: '20.x'
 
       - name: Install dependencies
         run: cd packages/data-schemas && npm ci
**.github/workflows/frontend-review.yml** (2 changed lines)

@@ -8,7 +8,7 @@ on:
       - release/*
     paths:
       - 'client/**'
+      - 'packages/**'
-      - 'packages/data-provider/**'
 
 jobs:
   tests_frontend_ubuntu:
**Deleted workflow file** (95 lines) — Generate Release Changelog PR

@@ -1,95 +0,0 @@
name: Generate Release Changelog PR

on:
  push:
    tags:
      - 'v*.*.*'
  workflow_dispatch:

jobs:
  generate-release-changelog-pr:
    permissions:
      contents: write # Needed for pushing commits and creating branches.
      pull-requests: write
    runs-on: ubuntu-latest
    steps:
      # 1. Checkout the repository (with full history).
      - name: Checkout Repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # 2. Generate the release changelog using our custom configuration.
      - name: Generate Release Changelog
        id: generate_release
        uses: mikepenz/release-changelog-builder-action@v5.1.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          configuration: ".github/configuration-release.json"
          owner: ${{ github.repository_owner }}
          repo: ${{ github.event.repository.name }}
          outputFile: CHANGELOG-release.md

      # 3. Update the main CHANGELOG.md:
      #    - If it doesn't exist, create it with a basic header.
      #    - Remove the "Unreleased" section (if present).
      #    - Prepend the new release changelog above previous releases.
      #    - Remove all temporary files before committing.
      - name: Update CHANGELOG.md
        run: |
          # Determine the release tag, e.g. "v1.2.3"
          TAG=${GITHUB_REF##*/}
          echo "Using release tag: $TAG"

          # Ensure CHANGELOG.md exists; if not, create a basic header.
          if [ ! -f CHANGELOG.md ]; then
            echo "# Changelog" > CHANGELOG.md
            echo "" >> CHANGELOG.md
            echo "All notable changes to this project will be documented in this file." >> CHANGELOG.md
            echo "" >> CHANGELOG.md
          fi

          echo "Updating CHANGELOG.md…"

          # Remove the "Unreleased" section (from "## [Unreleased]" until the first occurrence of '---') if it exists.
          if grep -q "^## \[Unreleased\]" CHANGELOG.md; then
            awk '/^## \[Unreleased\]/{flag=1} flag && /^---/{flag=0; next} !flag' CHANGELOG.md > CHANGELOG.cleaned
          else
            cp CHANGELOG.md CHANGELOG.cleaned
          fi

          # Split the cleaned file into:
          #   - header.md: content before the first release header ("## [v...").
          #   - tail.md: content from the first release header onward.
          awk '/^## \[v/{exit} {print}' CHANGELOG.cleaned > header.md
          awk 'f{print} /^## \[v/{f=1; print}' CHANGELOG.cleaned > tail.md

          # Combine header, the new release changelog, and the tail.
          echo "Combining updated changelog parts..."
          cat header.md CHANGELOG-release.md > CHANGELOG.md.new
          echo "" >> CHANGELOG.md.new
          cat tail.md >> CHANGELOG.md.new

          mv CHANGELOG.md.new CHANGELOG.md

          # Remove temporary files.
          rm -f CHANGELOG.cleaned header.md tail.md CHANGELOG-release.md

          echo "Final CHANGELOG.md content:"
          cat CHANGELOG.md

      # 4. Create (or update) the Pull Request with the updated CHANGELOG.md.
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          sign-commits: true
          commit-message: "chore: update CHANGELOG for release ${{ github.ref_name }}"
          base: main
          branch: "changelog/${{ github.ref_name }}"
          reviewers: danny-avila
          title: "📜 docs: Changelog for release ${{ github.ref_name }}"
          body: |
            **Description**:
            - This PR updates the CHANGELOG.md by removing the "Unreleased" section and adding new release notes for release ${{ github.ref_name }} above previous releases.
**Deleted workflow file** (107 lines) — Generate Unreleased Changelog PR

@@ -1,107 +0,0 @@
name: Generate Unreleased Changelog PR

on:
  schedule:
    - cron: "0 0 * * 1" # Runs every Monday at 00:00 UTC
  workflow_dispatch:

jobs:
  generate-unreleased-changelog-pr:
    permissions:
      contents: write # Needed for pushing commits and creating branches.
      pull-requests: write
    runs-on: ubuntu-latest
    steps:
      # 1. Checkout the repository on main.
      - name: Checkout Repository on Main
        uses: actions/checkout@v4
        with:
          ref: main
          fetch-depth: 0

      # 4. Get the latest version tag.
      - name: Get Latest Tag
        id: get_latest_tag
        run: |
          LATEST_TAG=$(git describe --tags $(git rev-list --tags --max-count=1) || echo "none")
          echo "Latest tag: $LATEST_TAG"
          echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT

      # 5. Generate the Unreleased changelog.
      - name: Generate Unreleased Changelog
        id: generate_unreleased
        uses: mikepenz/release-changelog-builder-action@v5.1.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          configuration: ".github/configuration-unreleased.json"
          owner: ${{ github.repository_owner }}
          repo: ${{ github.event.repository.name }}
          outputFile: CHANGELOG-unreleased.md
          fromTag: ${{ steps.get_latest_tag.outputs.tag }}
          toTag: main

      # 7. Update CHANGELOG.md with the new Unreleased section.
      - name: Update CHANGELOG.md
        id: update_changelog
        run: |
          # Create CHANGELOG.md if it doesn't exist.
          if [ ! -f CHANGELOG.md ]; then
            echo "# Changelog" > CHANGELOG.md
            echo "" >> CHANGELOG.md
            echo "All notable changes to this project will be documented in this file." >> CHANGELOG.md
            echo "" >> CHANGELOG.md
          fi

          echo "Updating CHANGELOG.md…"

          # Extract content before the "## [Unreleased]" (or first version header if missing).
          if grep -q "^## \[Unreleased\]" CHANGELOG.md; then
            awk '/^## \[Unreleased\]/{exit} {print}' CHANGELOG.md > CHANGELOG_TMP.md
          else
            awk '/^## \[v/{exit} {print}' CHANGELOG.md > CHANGELOG_TMP.md
          fi

          # Append the generated Unreleased changelog.
          echo "" >> CHANGELOG_TMP.md
          cat CHANGELOG-unreleased.md >> CHANGELOG_TMP.md
          echo "" >> CHANGELOG_TMP.md

          # Append the remainder of the original changelog (starting from the first version header).
          awk 'f{print} /^## \[v/{f=1; print}' CHANGELOG.md >> CHANGELOG_TMP.md

          # Replace the old file with the updated file.
          mv CHANGELOG_TMP.md CHANGELOG.md

          # Remove the temporary generated file.
          rm -f CHANGELOG-unreleased.md

          echo "Final CHANGELOG.md:"
          cat CHANGELOG.md

      # 8. Check if CHANGELOG.md has any updates.
      - name: Check for CHANGELOG.md changes
        id: changelog_changes
        run: |
          if git diff --quiet CHANGELOG.md; then
            echo "has_changes=false" >> $GITHUB_OUTPUT
          else
            echo "has_changes=true" >> $GITHUB_OUTPUT
          fi

      # 9. Create (or update) the Pull Request only if there are changes.
      - name: Create Pull Request
        if: steps.changelog_changes.outputs.has_changes == 'true'
        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          base: main
          branch: "changelog/unreleased-update"
          sign-commits: true
          commit-message: "action: update Unreleased changelog"
          title: "📜 docs: Unreleased Changelog"
          body: |
            **Description**:
            - This PR updates the Unreleased section in CHANGELOG.md.
            - It compares the current main branch with the latest version tag (determined as ${{ steps.get_latest_tag.outputs.tag }}),
              regenerates the Unreleased changelog, removes any old Unreleased block, and inserts the new content.
**.github/workflows/helmcharts.yml** (53 changed lines)

@@ -4,12 +4,13 @@ name: Build Helm Charts on Tag
 on:
   push:
     tags:
-      - "*"
+      - "chart-*"
 
 jobs:
   release:
     permissions:
       contents: write
+      packages: write
     runs-on: ubuntu-latest
     steps:
       - name: Checkout

@@ -26,15 +27,49 @@ jobs:
         uses: azure/setup-helm@v4
         env:
           GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
 
       - name: Build Subchart Deps
         run: |
-          cd helm/librechat-rag-api
-          helm dependency build
           cd helm/librechat
           helm dependency build
+          cd ../librechat-rag-api
+          helm dependency build
 
-      - name: Run chart-releaser
-        uses: helm/chart-releaser-action@v1.6.0
-        with:
-          charts_dir: helm
-          skip_existing: true
-        env:
-          CR_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
+      - name: Get Chart Version
+        id: chart-version
+        run: |
+          CHART_VERSION=$(echo "${{ github.ref_name }}" | cut -d'-' -f2)
+          echo "CHART_VERSION=${CHART_VERSION}" >> "$GITHUB_OUTPUT"
+
+      # Log in to GitHub Container Registry
+      - name: Log in to GitHub Container Registry
+        uses: docker/login-action@v3
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      # Run Helm OCI Charts Releaser
+      # This is for the librechat chart
+      - name: Release Helm OCI Charts for librechat
+        uses: appany/helm-oci-chart-releaser@v0.4.2
+        with:
+          name: librechat
+          repository: ${{ github.actor }}/librechat-chart
+          tag: ${{ steps.chart-version.outputs.CHART_VERSION }}
+          path: helm/librechat
+          registry: ghcr.io
+          registry_username: ${{ github.actor }}
+          registry_password: ${{ secrets.GITHUB_TOKEN }}
+
+      # this is for the librechat-rag-api chart
+      - name: Release Helm OCI Charts for librechat-rag-api
+        uses: appany/helm-oci-chart-releaser@v0.4.2
+        with:
+          name: librechat-rag-api
+          repository: ${{ github.actor }}/librechat-chart
+          tag: ${{ steps.chart-version.outputs.CHART_VERSION }}
+          path: helm/librechat-rag-api
+          registry: ghcr.io
+          registry_username: ${{ github.actor }}
+          registry_password: ${{ secrets.GITHUB_TOKEN }}
**.github/workflows/i18n-unused-keys.yml** (3 changed lines)

@@ -6,6 +6,7 @@ on:
       - "client/src/**"
       - "api/**"
       - "packages/data-provider/src/**"
+      - "packages/client/**"
 
 jobs:
   detect-unused-i18n-keys:

@@ -23,7 +24,7 @@ jobs:
 
           # Define paths
           I18N_FILE="client/src/locales/en/translation.json"
-          SOURCE_DIRS=("client/src" "api" "packages/data-provider/src")
+          SOURCE_DIRS=("client/src" "api" "packages/data-provider/src" "packages/client")
 
           # Check if translation file exists
           if [[ ! -f "$I18N_FILE" ]]; then
**.github/workflows/locize-i18n-sync.yml** (2 changed lines)

@@ -48,7 +48,7 @@ jobs:
 
       # 2. Download translation files from locize.
       - name: Download Translations from locize
-        uses: locize/download@v1
+        uses: locize/download@v2
         with:
           project-id: ${{ secrets.LOCIZE_PROJECT_ID }}
           path: "client/src/locales"
**.github/workflows/unused-packages.yml** (101 changed lines)

@@ -7,6 +7,7 @@ on:
      - 'package-lock.json'
      - 'client/**'
      - 'api/**'
+     - 'packages/client/**'
 
 jobs:
   detect-unused-packages:

@@ -28,7 +29,7 @@ jobs:
 
      - name: Validate JSON files
        run: |
-         for FILE in package.json client/package.json api/package.json; do
+         for FILE in package.json client/package.json api/package.json packages/client/package.json; do
           if [[ -f "$FILE" ]]; then
             jq empty "$FILE" || (echo "::error title=Invalid JSON::$FILE is invalid" && exit 1)
           fi

@@ -63,12 +64,31 @@ jobs:
           local folder=$1
           local output_file=$2
           if [[ -d "$folder" ]]; then
-            grep -rEho "require\(['\"]([a-zA-Z0-9@/._-]+)['\"]\)" "$folder" --include=\*.{js,ts,mjs,cjs} | \
+            # Extract require() statements
+            grep -rEho "require\(['\"]([a-zA-Z0-9@/._-]+)['\"]\)" "$folder" --include=\*.{js,ts,tsx,jsx,mjs,cjs} | \
             sed -E "s/require\(['\"]([a-zA-Z0-9@/._-]+)['\"]\)/\1/" > "$output_file"
 
-            grep -rEho "import .* from ['\"]([a-zA-Z0-9@/._-]+)['\"]" "$folder" --include=\*.{js,ts,mjs,cjs} | \
+            # Extract ES6 imports - various patterns
+            # import x from 'module'
+            grep -rEho "import .* from ['\"]([a-zA-Z0-9@/._-]+)['\"]" "$folder" --include=\*.{js,ts,tsx,jsx,mjs,cjs} | \
             sed -E "s/import .* from ['\"]([a-zA-Z0-9@/._-]+)['\"]/\1/" >> "$output_file"
+
+            # import 'module' (side-effect imports)
+            grep -rEho "import ['\"]([a-zA-Z0-9@/._-]+)['\"]" "$folder" --include=\*.{js,ts,tsx,jsx,mjs,cjs} | \
+            sed -E "s/import ['\"]([a-zA-Z0-9@/._-]+)['\"]/\1/" >> "$output_file"
+
+            # export { x } from 'module' or export * from 'module'
+            grep -rEho "export .* from ['\"]([a-zA-Z0-9@/._-]+)['\"]" "$folder" --include=\*.{js,ts,tsx,jsx,mjs,cjs} | \
+            sed -E "s/export .* from ['\"]([a-zA-Z0-9@/._-]+)['\"]/\1/" >> "$output_file"
+
+            # import type { x } from 'module' (TypeScript)
+            grep -rEho "import type .* from ['\"]([a-zA-Z0-9@/._-]+)['\"]" "$folder" --include=\*.{ts,tsx} | \
+            sed -E "s/import type .* from ['\"]([a-zA-Z0-9@/._-]+)['\"]/\1/" >> "$output_file"
+
+            # Remove subpath imports but keep the base package
+            # e.g., '@tanstack/react-query/devtools' becomes '@tanstack/react-query'
+            sed -i -E 's|^(@?[a-zA-Z0-9-]+(/[a-zA-Z0-9-]+)?)/.*|\1|' "$output_file"
+
             sort -u "$output_file" -o "$output_file"
           else
             touch "$output_file"
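Concretely, the broadened extraction now catches all of these import shapes (illustrative JS/TS lines; each resolves to its base package name):

```js
const chalk = require('chalk');                    // require() call
import React from 'react';                         // default import
import 'normalize.css';                            // side-effect import (new)
export { useQuery } from '@tanstack/react-query';  // re-export (new)
import type { AxiosError } from 'axios';           // TypeScript type import (new)
// Subpaths are trimmed to the base package:
// '@tanstack/react-query/devtools' -> '@tanstack/react-query'
```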
@@ -78,13 +98,80 @@ jobs:
          extract_deps_from_code "." root_used_code.txt
          extract_deps_from_code "client" client_used_code.txt
          extract_deps_from_code "api" api_used_code.txt
+
+         # Extract dependencies used by @librechat/client package
+         extract_deps_from_code "packages/client" packages_client_used_code.txt
+
+     - name: Get @librechat/client dependencies
+       id: get-librechat-client-deps
+       run: |
+         if [[ -f "packages/client/package.json" ]]; then
+           # Get all dependencies from @librechat/client (dependencies, devDependencies, and peerDependencies)
+           DEPS=$(jq -r '.dependencies // {} | keys[]' packages/client/package.json 2>/dev/null || echo "")
+           DEV_DEPS=$(jq -r '.devDependencies // {} | keys[]' packages/client/package.json 2>/dev/null || echo "")
+           PEER_DEPS=$(jq -r '.peerDependencies // {} | keys[]' packages/client/package.json 2>/dev/null || echo "")
+
+           # Combine all dependencies
+           echo "$DEPS" > librechat_client_deps.txt
+           echo "$DEV_DEPS" >> librechat_client_deps.txt
+           echo "$PEER_DEPS" >> librechat_client_deps.txt
+
+           # Also include dependencies that are imported in packages/client
+           cat packages_client_used_code.txt >> librechat_client_deps.txt
+
+           # Remove empty lines and sort
+           grep -v '^$' librechat_client_deps.txt | sort -u > temp_deps.txt
+           mv temp_deps.txt librechat_client_deps.txt
+         else
+           touch librechat_client_deps.txt
+         fi
+
+     - name: Extract Workspace Dependencies
+       id: extract-workspace-deps
+       run: |
+         # Function to get dependencies from a workspace package that are used by another package
+         get_workspace_package_deps() {
+           local package_json=$1
+           local output_file=$2
+
+           # Get all workspace dependencies (starting with @librechat/)
+           if [[ -f "$package_json" ]]; then
+             local workspace_deps=$(jq -r '.dependencies // {} | to_entries[] | select(.key | startswith("@librechat/")) | .key' "$package_json" 2>/dev/null || echo "")
+
+             # For each workspace dependency, get its dependencies
+             for dep in $workspace_deps; do
+               # Convert @librechat/api to packages/api
+               local workspace_path=$(echo "$dep" | sed 's/@librechat\//packages\//')
+               local workspace_package_json="${workspace_path}/package.json"
+
+               if [[ -f "$workspace_package_json" ]]; then
+                 # Extract all dependencies from the workspace package
+                 jq -r '.dependencies // {} | keys[]' "$workspace_package_json" 2>/dev/null >> "$output_file"
+                 # Also extract peerDependencies
+                 jq -r '.peerDependencies // {} | keys[]' "$workspace_package_json" 2>/dev/null >> "$output_file"
+               fi
+             done
+           fi
+
+           if [[ -f "$output_file" ]]; then
+             sort -u "$output_file" -o "$output_file"
+           else
+             touch "$output_file"
+           fi
+         }
+
+         # Get workspace dependencies for each package
+         get_workspace_package_deps "package.json" root_workspace_deps.txt
+         get_workspace_package_deps "client/package.json" client_workspace_deps.txt
+         get_workspace_package_deps "api/package.json" api_workspace_deps.txt
 
      - name: Run depcheck for root package.json
        id: check-root
        run: |
          if [[ -f "package.json" ]]; then
            UNUSED=$(depcheck --json | jq -r '.dependencies | join("\n")' || echo "")
-           UNUSED=$(comm -23 <(echo "$UNUSED" | sort) <(cat root_used_deps.txt root_used_code.txt | sort) || echo "")
+           # Exclude dependencies used in scripts, code, and workspace packages
+           UNUSED=$(comm -23 <(echo "$UNUSED" | sort) <(cat root_used_deps.txt root_used_code.txt root_workspace_deps.txt | sort) || echo "")
            echo "ROOT_UNUSED<<EOF" >> $GITHUB_ENV
            echo "$UNUSED" >> $GITHUB_ENV
            echo "EOF" >> $GITHUB_ENV

@@ -97,7 +184,8 @@ jobs:
          chmod -R 755 client
          cd client
          UNUSED=$(depcheck --json | jq -r '.dependencies | join("\n")' || echo "")
-         UNUSED=$(comm -23 <(echo "$UNUSED" | sort) <(cat ../client_used_deps.txt ../client_used_code.txt | sort) || echo "")
+         # Exclude dependencies used in scripts, code, and workspace packages
+         UNUSED=$(comm -23 <(echo "$UNUSED" | sort) <(cat ../client_used_deps.txt ../client_used_code.txt ../client_workspace_deps.txt | sort) || echo "")
          # Filter out false positives
          UNUSED=$(echo "$UNUSED" | grep -v "^micromark-extension-llm-math$" || echo "")
          echo "CLIENT_UNUSED<<EOF" >> $GITHUB_ENV

@@ -113,7 +201,8 @@ jobs:
          chmod -R 755 api
          cd api
          UNUSED=$(depcheck --json | jq -r '.dependencies | join("\n")' || echo "")
-         UNUSED=$(comm -23 <(echo "$UNUSED" | sort) <(cat ../api_used_deps.txt ../api_used_code.txt | sort) || echo "")
+         # Exclude dependencies used in scripts, code, and workspace packages
+         UNUSED=$(comm -23 <(echo "$UNUSED" | sort) <(cat ../api_used_deps.txt ../api_used_code.txt ../api_workspace_deps.txt | sort) || echo "")
          echo "API_UNUSED<<EOF" >> $GITHUB_ENV
          echo "$UNUSED" >> $GITHUB_ENV
          echo "EOF" >> $GITHUB_ENV
**.gitignore** (3 changed lines)

@@ -13,6 +13,9 @@ pids
 *.seed
 .git
 
+# CI/CD data
+test-image*
+
 # Directory for instrumented libs generated by jscoverage/JSCover
 lib-cov
**Dockerfile**

@@ -1,4 +1,4 @@
-# v0.7.9-rc1
+# v0.8.0-rc2
 
 # Base node image
 FROM node:20-alpine AS node
**Dockerfile.multi**

@@ -1,5 +1,5 @@
 # Dockerfile.multi
-# v0.7.9-rc1
+# v0.8.0-rc2
 
 # Base for all builds
 FROM node:20-alpine AS base-min

@@ -16,6 +16,7 @@ COPY package*.json ./
 COPY packages/data-provider/package*.json ./packages/data-provider/
 COPY packages/api/package*.json ./packages/api/
 COPY packages/data-schemas/package*.json ./packages/data-schemas/
+COPY packages/client/package*.json ./packages/client/
 COPY client/package*.json ./client/
 COPY api/package*.json ./api/

@@ -45,11 +46,19 @@ COPY --from=data-provider-build /app/packages/data-provider/dist /app/packages/d
 COPY --from=data-schemas-build /app/packages/data-schemas/dist /app/packages/data-schemas/dist
 RUN npm run build
 
+# Build `client` package
+FROM base AS client-package-build
+WORKDIR /app/packages/client
+COPY packages/client ./
+RUN npm run build
+
 # Client build
 FROM base AS client-build
 WORKDIR /app/client
 COPY client ./
 COPY --from=data-provider-build /app/packages/data-provider/dist /app/packages/data-provider/dist
+COPY --from=client-package-build /app/packages/client/dist /app/packages/client/dist
+COPY --from=client-package-build /app/packages/client/src /app/packages/client/src
 ENV NODE_OPTIONS="--max-old-space-size=2048"
 RUN npm run build
@@ -1222,7 +1222,9 @@ ${convo}
     }
 
     if (this.isOmni === true && modelOptions.max_tokens != null) {
-      modelOptions.max_completion_tokens = modelOptions.max_tokens;
+      const paramName =
+        modelOptions.useResponsesApi === true ? 'max_output_tokens' : 'max_completion_tokens';
+      modelOptions[paramName] = modelOptions.max_tokens;
       delete modelOptions.max_tokens;
     }
     if (this.isOmni === true && modelOptions.temperature != null) {
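The effect of the change, extracted into a standalone function with illustrative inputs (this mirrors the patched block exactly):

```js
// For "omni" models, max_tokens is renamed to the parameter the target API expects.
function renameMaxTokens(modelOptions, isOmni) {
  if (isOmni === true && modelOptions.max_tokens != null) {
    const paramName =
      modelOptions.useResponsesApi === true ? 'max_output_tokens' : 'max_completion_tokens';
    modelOptions[paramName] = modelOptions.max_tokens;
    delete modelOptions.max_tokens;
  }
  return modelOptions;
}

renameMaxTokens({ max_tokens: 1024, useResponsesApi: true }, true);
// -> { useResponsesApi: true, max_output_tokens: 1024 }   (Responses API)
renameMaxTokens({ max_tokens: 1024 }, true);
// -> { max_completion_tokens: 1024 }                      (Chat Completions)
```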
**api/app/clients/tools/structured/DALLE3.js**

@@ -3,8 +3,8 @@ const path = require('path');
 const OpenAI = require('openai');
 const fetch = require('node-fetch');
 const { v4: uuidv4 } = require('uuid');
+const { ProxyAgent } = require('undici');
 const { Tool } = require('@langchain/core/tools');
-const { HttpsProxyAgent } = require('https-proxy-agent');
 const { FileContext, ContentTypes } = require('librechat-data-provider');
 const { getImageBasename } = require('~/server/services/Files/images');
 const extractBaseURL = require('~/utils/extractBaseURL');

@@ -46,7 +46,10 @@ class DALLE3 extends Tool {
     }
 
     if (process.env.PROXY) {
-      config.httpAgent = new HttpsProxyAgent(process.env.PROXY);
+      const proxyAgent = new ProxyAgent(process.env.PROXY);
+      config.fetchOptions = {
+        dispatcher: proxyAgent,
+      };
     }
 
     /** @type {OpenAI} */

@@ -163,7 +166,8 @@ Error Message: ${error.message}`);
       if (this.isAgent) {
         let fetchOptions = {};
         if (process.env.PROXY) {
-          fetchOptions.agent = new HttpsProxyAgent(process.env.PROXY);
+          const proxyAgent = new ProxyAgent(process.env.PROXY);
+          fetchOptions.dispatcher = proxyAgent;
         }
         const imageResponse = await fetch(theImageUrl, fetchOptions);
         const arrayBuffer = await imageResponse.arrayBuffer();
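The pattern behind both hunks: newer openai SDK releases route requests through fetch, so proxying goes through an undici dispatcher instead of a Node http agent. A minimal standalone sketch of the same configuration:

```js
// Sketch: proxying an OpenAI client via undici's ProxyAgent,
// as the patched DALLE3 constructor now does.
const OpenAI = require('openai');
const { ProxyAgent } = require('undici');

const config = { apiKey: process.env.DALLE_API_KEY };
if (process.env.PROXY) {
  config.fetchOptions = { dispatcher: new ProxyAgent(process.env.PROXY) };
}
const openai = new OpenAI(config);
```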
@@ -3,10 +3,10 @@ const axios = require('axios');
 const { v4 } = require('uuid');
 const OpenAI = require('openai');
 const FormData = require('form-data');
+const { ProxyAgent } = require('undici');
 const { tool } = require('@langchain/core/tools');
 const { logAxiosError } = require('@librechat/api');
 const { logger } = require('@librechat/data-schemas');
-const { HttpsProxyAgent } = require('https-proxy-agent');
 const { ContentTypes, EImageOutputType } = require('librechat-data-provider');
 const { getStrategyFunctions } = require('~/server/services/Files/strategies');
 const { extractBaseURL } = require('~/utils');

@@ -189,7 +189,10 @@ function createOpenAIImageTools(fields = {}) {
     }
     const clientConfig = { ...closureConfig };
     if (process.env.PROXY) {
-      clientConfig.httpAgent = new HttpsProxyAgent(process.env.PROXY);
+      const proxyAgent = new ProxyAgent(process.env.PROXY);
+      clientConfig.fetchOptions = {
+        dispatcher: proxyAgent,
+      };
     }
 
     /** @type {OpenAI} */

@@ -335,7 +338,10 @@ Error Message: ${error.message}`);
 
     const clientConfig = { ...closureConfig };
     if (process.env.PROXY) {
-      clientConfig.httpAgent = new HttpsProxyAgent(process.env.PROXY);
+      const proxyAgent = new ProxyAgent(process.env.PROXY);
+      clientConfig.fetchOptions = {
+        dispatcher: proxyAgent,
+      };
     }
 
     const formData = new FormData();

@@ -447,6 +453,19 @@ Error Message: ${error.message}`);
       baseURL,
     };
 
+    if (process.env.PROXY) {
+      try {
+        const url = new URL(process.env.PROXY);
+        axiosConfig.proxy = {
+          host: url.hostname.replace(/^\[|\]$/g, ''),
+          port: url.port ? parseInt(url.port, 10) : undefined,
+          protocol: url.protocol.replace(':', ''),
+        };
+      } catch (error) {
+        logger.error('Error parsing proxy URL:', error);
+      }
+    }
+
     if (process.env.IMAGE_GEN_OAI_AZURE_API_VERSION && process.env.IMAGE_GEN_OAI_BASEURL) {
       axiosConfig.params = {
         'api-version': process.env.IMAGE_GEN_OAI_AZURE_API_VERSION,
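One subtlety in the new axios branch: for an IPv6 proxy URL, the WHATWG URL parser keeps the square brackets in hostname, while axios expects a bare address in proxy.host; that is what the replace(/^\[|\]$/g, '') strips. For example:

```js
const url = new URL('http://[::1]:8080');
url.hostname;                           // '[::1]' (brackets included)
url.hostname.replace(/^\[|\]$/g, '');   // '::1'   (what axios wants)
url.port;                               // '8080'
url.protocol.replace(':', '');          // 'http'
```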
**api/app/clients/tools/structured/specs/DALLE3-proxy.spec.js** (new file, 94 lines)

@@ -0,0 +1,94 @@
const DALLE3 = require('../DALLE3');
const { ProxyAgent } = require('undici');

const processFileURL = jest.fn();

jest.mock('~/server/services/Files/images', () => ({
  getImageBasename: jest.fn().mockImplementation((url) => {
    const parts = url.split('/');
    const lastPart = parts.pop();
    const imageExtensionRegex = /\.(jpg|jpeg|png|gif|bmp|tiff|svg)$/i;
    if (imageExtensionRegex.test(lastPart)) {
      return lastPart;
    }
    return '';
  }),
}));

jest.mock('fs', () => {
  return {
    existsSync: jest.fn(),
    mkdirSync: jest.fn(),
    promises: {
      writeFile: jest.fn(),
      readFile: jest.fn(),
      unlink: jest.fn(),
    },
  };
});

jest.mock('path', () => {
  return {
    resolve: jest.fn(),
    join: jest.fn(),
    relative: jest.fn(),
    extname: jest.fn().mockImplementation((filename) => {
      return filename.slice(filename.lastIndexOf('.'));
    }),
  };
});

describe('DALLE3 Proxy Configuration', () => {
  let originalEnv;

  beforeAll(() => {
    originalEnv = { ...process.env };
  });

  beforeEach(() => {
    jest.resetModules();
    process.env = { ...originalEnv };
  });

  afterEach(() => {
    process.env = originalEnv;
  });

  it('should configure ProxyAgent in fetchOptions.dispatcher when PROXY env is set', () => {
    // Set proxy environment variable
    process.env.PROXY = 'http://proxy.example.com:8080';
    process.env.DALLE_API_KEY = 'test-api-key';

    // Create instance
    const dalleWithProxy = new DALLE3({ processFileURL });

    // Check that the openai client exists
    expect(dalleWithProxy.openai).toBeDefined();

    // Check that _options exists and has fetchOptions with a dispatcher
    expect(dalleWithProxy.openai._options).toBeDefined();
    expect(dalleWithProxy.openai._options.fetchOptions).toBeDefined();
    expect(dalleWithProxy.openai._options.fetchOptions.dispatcher).toBeDefined();
    expect(dalleWithProxy.openai._options.fetchOptions.dispatcher).toBeInstanceOf(ProxyAgent);
  });

  it('should not configure ProxyAgent when PROXY env is not set', () => {
    // Ensure PROXY is not set
    delete process.env.PROXY;
    process.env.DALLE_API_KEY = 'test-api-key';

    // Create instance
    const dalleWithoutProxy = new DALLE3({ processFileURL });

    // Check that the openai client exists
    expect(dalleWithoutProxy.openai).toBeDefined();

    // Check that _options exists but fetchOptions either doesn't exist or doesn't have a dispatcher
    expect(dalleWithoutProxy.openai._options).toBeDefined();

    // fetchOptions should either not exist or not have a dispatcher
    if (dalleWithoutProxy.openai._options.fetchOptions) {
      expect(dalleWithoutProxy.openai._options.fetchOptions.dispatcher).toBeUndefined();
    }
  });
});
@@ -30,7 +30,12 @@ const primeFiles = async (options) => {
   // Filter by access if user and agent are provided
   let dbFiles;
   if (req?.user?.id && agentId) {
-    dbFiles = await filterFilesByAgentAccess(allFiles, req.user.id, agentId);
+    dbFiles = await filterFilesByAgentAccess({
+      files: allFiles,
+      userId: req.user.id,
+      role: req.user.role,
+      agentId,
+    });
   } else {
     dbFiles = allFiles;
   }

@@ -120,11 +125,13 @@ const createFileSearchTool = async ({ req, files, entity_id }) => {
       }
 
       const formattedResults = validResults
-        .flatMap((result) =>
+        .flatMap((result, fileIndex) =>
           result.data.map(([docInfo, distance]) => ({
             filename: docInfo.metadata.source.split('/').pop(),
             content: docInfo.page_content,
             distance,
+            file_id: files[fileIndex]?.file_id,
+            page: docInfo.metadata.page || null,
           })),
         )
        // TODO: results should be sorted by relevance, not distance

@@ -134,18 +141,37 @@ const createFileSearchTool = async ({ req, files, entity_id }) => {
 
       const formattedString = formattedResults
         .map(
-          (result) =>
-            `File: ${result.filename}\nRelevance: ${1.0 - result.distance.toFixed(4)}\nContent: ${
+          (result, index) =>
+            `File: ${result.filename}\nAnchor: \\ue202turn0file${index} (${result.filename})\nRelevance: ${(1.0 - result.distance).toFixed(4)}\nContent: ${
              result.content
            }\n`,
        )
        .join('\n---\n');
 
-      return formattedString;
+      const sources = formattedResults.map((result) => ({
+        type: 'file',
+        fileId: result.file_id,
+        content: result.content,
+        fileName: result.filename,
+        relevance: 1.0 - result.distance,
+        pages: result.page ? [result.page] : [],
+        pageRelevance: result.page ? { [result.page]: 1.0 - result.distance } : {},
+      }));
+
+      return [formattedString, { [Tools.file_search]: { sources } }];
     },
     {
       name: Tools.file_search,
-      description: `Performs semantic search across attached "${Tools.file_search}" documents using natural language queries. This tool analyzes the content of uploaded files to find relevant information, quotes, and passages that best match your query. Use this to extract specific information or find relevant sections within the available documents.`,
+      responseFormat: 'content_and_artifact',
+      description: `Performs semantic search across attached "${Tools.file_search}" documents using natural language queries. This tool analyzes the content of uploaded files to find relevant information, quotes, and passages that best match your query. Use this to extract specific information or find relevant sections within the available documents.
+
+**CITE FILE SEARCH RESULTS:**
+Use anchor markers immediately after statements derived from file content. Reference the filename in your text:
+- File citation: "The document.pdf states that... \\ue202turn0file0"
+- Page reference: "According to report.docx... \\ue202turn0file1"
+- Multi-file: "Multiple sources confirm... \\ue200\\ue202turn0file0\\ue202turn0file1\\ue201"
+
+**ALWAYS mention the filename in your text before the citation marker. NEVER use markdown links or footnotes.**`,
       schema: z.object({
         query: z
          .string()
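Two things happen in this hunk: an operator-precedence bug is fixed ((1.0 - distance).toFixed(4) formats the difference, whereas the old code subtracted a toFixed string from 1.0), and the tool switches to LangChain's content_and_artifact response format, returning a [content, artifact] pair so citation sources travel alongside the model-visible text. A generic sketch of that return shape (not the LibreChat implementation; runSearch is hypothetical):

```js
// Sketch: a LangChain tool using responseFormat 'content_and_artifact'.
const { tool } = require('@langchain/core/tools');
const { z } = require('zod');

const searchDemo = tool(
  async ({ query }) => {
    const results = await runSearch(query); // hypothetical search helper
    const content = results.map((r, i) => `File: ${r.fileName} (#${i})`).join('\n');
    const artifact = { sources: results }; // structured data for the UI, not the model
    return [content, artifact];
  },
  {
    name: 'file_search_demo',
    description: 'Semantic search over attached files.',
    responseFormat: 'content_and_artifact',
    schema: z.object({ query: z.string() }),
  },
);
```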
@@ -230,7 +230,7 @@ const loadTools = async ({
 
   /** @type {Record<string, string>} */
   const toolContextMap = {};
-  const appTools = (await getCachedTools({ includeGlobal: true })) ?? {};
+  const cachedTools = (await getCachedTools({ userId: user, includeGlobal: true })) ?? {};
 
   for (const tool of tools) {
     if (tool === Tools.execute_code) {

@@ -298,7 +298,7 @@ Current Date & Time: ${replaceSpecialVars({ text: '{{iso_datetime}}' })}
         });
       };
       continue;
-    } else if (tool && appTools[tool] && mcpToolPattern.test(tool)) {
+    } else if (tool && cachedTools && mcpToolPattern.test(tool)) {
       requestedTools[tool] = async () =>
         createMCPTool({
           req: options.req,
**api/cache/cacheConfig.js** (29 changed lines)

@@ -1,5 +1,6 @@
 const fs = require('fs');
 const { math, isEnabled } = require('@librechat/api');
+const { CacheKeys } = require('librechat-data-provider');
 
 // To ensure that different deployments do not interfere with each other's cache, we use a prefix for the Redis keys.
 // This prefix is usually the deployment ID, which is often passed to the container or pod as an env var.

@@ -15,7 +16,26 @@ if (USE_REDIS && !process.env.REDIS_URI) {
   throw new Error('USE_REDIS is enabled but REDIS_URI is not set.');
 }
 
+// Comma-separated list of cache namespaces that should be forced to use in-memory storage
+// even when Redis is enabled. This allows selective performance optimization for specific caches.
+const FORCED_IN_MEMORY_CACHE_NAMESPACES = process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES
+  ? process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES.split(',').map((key) => key.trim())
+  : [];
+
+// Validate against CacheKeys enum
+if (FORCED_IN_MEMORY_CACHE_NAMESPACES.length > 0) {
+  const validKeys = Object.values(CacheKeys);
+  const invalidKeys = FORCED_IN_MEMORY_CACHE_NAMESPACES.filter((key) => !validKeys.includes(key));
+
+  if (invalidKeys.length > 0) {
+    throw new Error(
+      `Invalid cache keys in FORCED_IN_MEMORY_CACHE_NAMESPACES: ${invalidKeys.join(', ')}. Valid keys: ${validKeys.join(', ')}`,
+    );
+  }
+}
+
 const cacheConfig = {
+  FORCED_IN_MEMORY_CACHE_NAMESPACES,
   USE_REDIS,
   REDIS_URI: process.env.REDIS_URI,
   REDIS_USERNAME: process.env.REDIS_USERNAME,

@@ -23,6 +43,15 @@ const cacheConfig = {
   REDIS_CA: process.env.REDIS_CA ? fs.readFileSync(process.env.REDIS_CA, 'utf8') : null,
   REDIS_KEY_PREFIX: process.env[REDIS_KEY_PREFIX_VAR] || REDIS_KEY_PREFIX || '',
   REDIS_MAX_LISTENERS: math(process.env.REDIS_MAX_LISTENERS, 40),
+  REDIS_PING_INTERVAL: math(process.env.REDIS_PING_INTERVAL, 0),
+  /** Max delay between reconnection attempts in ms */
+  REDIS_RETRY_MAX_DELAY: math(process.env.REDIS_RETRY_MAX_DELAY, 3000),
+  /** Max number of reconnection attempts (0 = infinite) */
+  REDIS_RETRY_MAX_ATTEMPTS: math(process.env.REDIS_RETRY_MAX_ATTEMPTS, 10),
+  /** Connection timeout in ms */
+  REDIS_CONNECT_TIMEOUT: math(process.env.REDIS_CONNECT_TIMEOUT, 10000),
+  /** Queue commands when disconnected */
+  REDIS_ENABLE_OFFLINE_QUEUE: isEnabled(process.env.REDIS_ENABLE_OFFLINE_QUEUE ?? 'true'),
 
   CI: isEnabled(process.env.CI),
   DEBUG_MEMORY_CACHE: isEnabled(process.env.DEBUG_MEMORY_CACHE),
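Usage is a single env setting, validated at require time against the CacheKeys enum so typos fail fast (behavior per the specs below):

```js
// In .env:  FORCED_IN_MEMORY_CACHE_NAMESPACES=STATIC_CONFIG,ROLES
process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = 'STATIC_CONFIG,ROLES';
const { cacheConfig } = require('./cacheConfig'); // fresh require (the specs use jest.resetModules())
cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES;    // ['STATIC_CONFIG', 'ROLES']

// An unknown key throws immediately on startup:
// FORCED_IN_MEMORY_CACHE_NAMESPACES=INVALID_KEY,ROLES
// -> Error: Invalid cache keys in FORCED_IN_MEMORY_CACHE_NAMESPACES: INVALID_KEY. ...
```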
**api/cache/cacheConfig.spec.js** (49 changed lines)

@@ -14,6 +14,8 @@ describe('cacheConfig', () => {
     delete process.env.REDIS_KEY_PREFIX_VAR;
     delete process.env.REDIS_KEY_PREFIX;
     delete process.env.USE_REDIS;
+    delete process.env.REDIS_PING_INTERVAL;
+    delete process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES;
 
     // Clear require cache
     jest.resetModules();

@@ -105,4 +107,51 @@ describe('cacheConfig', () => {
       expect(cacheConfig.REDIS_CA).toBeNull();
     });
   });
 
+  describe('REDIS_PING_INTERVAL configuration', () => {
+    test('should default to 0 when REDIS_PING_INTERVAL is not set', () => {
+      const { cacheConfig } = require('./cacheConfig');
+      expect(cacheConfig.REDIS_PING_INTERVAL).toBe(0);
+    });
+
+    test('should use provided REDIS_PING_INTERVAL value', () => {
+      process.env.REDIS_PING_INTERVAL = '300';
+
+      const { cacheConfig } = require('./cacheConfig');
+      expect(cacheConfig.REDIS_PING_INTERVAL).toBe(300);
+    });
+  });
+
+  describe('FORCED_IN_MEMORY_CACHE_NAMESPACES validation', () => {
+    test('should parse comma-separated cache keys correctly', () => {
+      process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = ' ROLES, STATIC_CONFIG ,MESSAGES ';
+
+      const { cacheConfig } = require('./cacheConfig');
+      expect(cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES).toEqual([
+        'ROLES',
+        'STATIC_CONFIG',
+        'MESSAGES',
+      ]);
+    });
+
+    test('should throw error for invalid cache keys', () => {
+      process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = 'INVALID_KEY,ROLES';
+
+      expect(() => {
+        require('./cacheConfig');
+      }).toThrow('Invalid cache keys in FORCED_IN_MEMORY_CACHE_NAMESPACES: INVALID_KEY');
+    });
+
+    test('should handle empty string gracefully', () => {
+      process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = '';
+
+      const { cacheConfig } = require('./cacheConfig');
+      expect(cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES).toEqual([]);
+    });
+
+    test('should handle undefined env var gracefully', () => {
+      const { cacheConfig } = require('./cacheConfig');
+      expect(cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES).toEqual([]);
+    });
+  });
 });
**api/cache/cacheFactory.js** (66 changed lines)

@@ -1,12 +1,13 @@
 const KeyvRedis = require('@keyv/redis').default;
 const { Keyv } = require('keyv');
-const { cacheConfig } = require('./cacheConfig');
-const { keyvRedisClient, ioredisClient, GLOBAL_PREFIX_SEPARATOR } = require('./redisClients');
-const { RedisStore } = require('rate-limit-redis');
 const { Time } = require('librechat-data-provider');
+const { logger } = require('@librechat/data-schemas');
 const { RedisStore: ConnectRedis } = require('connect-redis');
 const MemoryStore = require('memorystore')(require('express-session'));
+const { keyvRedisClient, ioredisClient, GLOBAL_PREFIX_SEPARATOR } = require('./redisClients');
+const { cacheConfig } = require('./cacheConfig');
 const { violationFile } = require('./keyvFiles');
+const { RedisStore } = require('rate-limit-redis');
 
 /**
  * Creates a cache instance using Redis or a fallback store. Suitable for general caching needs.

@@ -16,12 +17,25 @@ const { RedisStore } = require('rate-limit-redis');
  * @returns {Keyv} Cache instance.
  */
 const standardCache = (namespace, ttl = undefined, fallbackStore = undefined) => {
-  if (cacheConfig.USE_REDIS) {
-    const keyvRedis = new KeyvRedis(keyvRedisClient);
-    const cache = new Keyv(keyvRedis, { namespace, ttl });
-    keyvRedis.namespace = cacheConfig.REDIS_KEY_PREFIX;
-    keyvRedis.keyPrefixSeparator = GLOBAL_PREFIX_SEPARATOR;
-    return cache;
+  if (
+    cacheConfig.USE_REDIS &&
+    !cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES?.includes(namespace)
+  ) {
+    try {
+      const keyvRedis = new KeyvRedis(keyvRedisClient);
+      const cache = new Keyv(keyvRedis, { namespace, ttl });
+      keyvRedis.namespace = cacheConfig.REDIS_KEY_PREFIX;
+      keyvRedis.keyPrefixSeparator = GLOBAL_PREFIX_SEPARATOR;
+
+      cache.on('error', (err) => {
+        logger.error(`Cache error in namespace ${namespace}:`, err);
+      });
+
+      return cache;
+    } catch (err) {
+      logger.error(`Failed to create Redis cache for namespace ${namespace}:`, err);
+      throw err;
+    }
   }
   if (fallbackStore) return new Keyv({ store: fallbackStore, namespace, ttl });
   return new Keyv({ namespace, ttl });

@@ -47,7 +61,13 @@ const violationCache = (namespace, ttl = undefined) => {
 const sessionCache = (namespace, ttl = undefined) => {
   namespace = namespace.endsWith(':') ? namespace : `${namespace}:`;
   if (!cacheConfig.USE_REDIS) return new MemoryStore({ ttl, checkPeriod: Time.ONE_DAY });
-  return new ConnectRedis({ client: ioredisClient, ttl, prefix: namespace });
+  const store = new ConnectRedis({ client: ioredisClient, ttl, prefix: namespace });
+  if (ioredisClient) {
+    ioredisClient.on('error', (err) => {
+      logger.error(`Session store Redis error for namespace ${namespace}:`, err);
+    });
+  }
+  return store;
 };

@@ -59,8 +79,30 @@ const limiterCache = (prefix) => {
   if (!prefix) throw new Error('prefix is required');
   if (!cacheConfig.USE_REDIS) return undefined;
   prefix = prefix.endsWith(':') ? prefix : `${prefix}:`;
-  return new RedisStore({ sendCommand, prefix });
+
+  try {
+    if (!ioredisClient) {
+      logger.warn(`Redis client not available for rate limiter with prefix ${prefix}`);
+      return undefined;
+    }
+
+    return new RedisStore({ sendCommand, prefix });
+  } catch (err) {
+    logger.error(`Failed to create Redis rate limiter for prefix ${prefix}:`, err);
+    return undefined;
+  }
 };
 
-const sendCommand = (...args) => ioredisClient?.call(...args);
+const sendCommand = (...args) => {
+  if (!ioredisClient) {
+    logger.warn('Redis client not available for command execution');
+    return Promise.reject(new Error('Redis client not available'));
+  }
+
+  return ioredisClient.call(...args).catch((err) => {
+    logger.error('Redis command execution failed:', err);
+    throw err;
+  });
+};
 
 module.exports = { standardCache, sessionCache, violationCache, limiterCache };
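Net effect for callers of standardCache, per the specs that follow (sketch):

```js
// With USE_REDIS=true and FORCED_IN_MEMORY_CACHE_NAMESPACES=STATIC_CONFIG:
const staticConfig = standardCache(CacheKeys.STATIC_CONFIG); // plain in-memory Keyv
const roles = standardCache(CacheKeys.ROLES);                // Redis-backed Keyv + 'error' listener
```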
168
api/cache/cacheFactory.spec.js
vendored
168
api/cache/cacheFactory.spec.js
vendored
@@ -6,13 +6,17 @@ const mockKeyvRedis = {
|
||||
keyPrefixSeparator: '',
|
||||
};
|
||||
|
||||
const mockKeyv = jest.fn().mockReturnValue({ mock: 'keyv' });
|
||||
const mockKeyv = jest.fn().mockReturnValue({
|
||||
mock: 'keyv',
|
||||
on: jest.fn(),
|
||||
});
|
||||
const mockConnectRedis = jest.fn().mockReturnValue({ mock: 'connectRedis' });
|
||||
const mockMemoryStore = jest.fn().mockReturnValue({ mock: 'memoryStore' });
|
||||
const mockRedisStore = jest.fn().mockReturnValue({ mock: 'redisStore' });
|
||||
|
||||
const mockIoredisClient = {
|
||||
call: jest.fn(),
|
||||
on: jest.fn(),
|
||||
};
|
||||
|
||||
const mockKeyvRedisClient = {};
|
||||
@@ -31,6 +35,7 @@ jest.mock('./cacheConfig', () => ({
|
||||
cacheConfig: {
|
||||
USE_REDIS: false,
|
||||
REDIS_KEY_PREFIX: 'test',
|
||||
FORCED_IN_MEMORY_CACHE_NAMESPACES: [],
|
||||
},
|
||||
}));
|
||||
|
||||
@@ -52,6 +57,14 @@ jest.mock('rate-limit-redis', () => ({
|
||||
RedisStore: mockRedisStore,
|
||||
}));
|
||||
|
||||
jest.mock('@librechat/data-schemas', () => ({
|
||||
logger: {
|
||||
error: jest.fn(),
|
||||
warn: jest.fn(),
|
||||
info: jest.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
// Import after mocking
|
||||
const { standardCache, sessionCache, violationCache, limiterCache } = require('./cacheFactory');
|
||||
const { cacheConfig } = require('./cacheConfig');
|
||||
@@ -63,6 +76,7 @@ describe('cacheFactory', () => {
|
||||
// Reset cache config mock
|
||||
cacheConfig.USE_REDIS = false;
|
||||
cacheConfig.REDIS_KEY_PREFIX = 'test';
|
||||
cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES = [];
|
||||
});
|
||||
|
||||
describe('redisCache', () => {
|
||||
@@ -116,6 +130,52 @@ describe('cacheFactory', () => {
|
||||
|
||||
expect(mockKeyv).toHaveBeenCalledWith({ namespace: undefined, ttl: undefined });
|
||||
});
|
||||
|
||||
it('should use fallback when namespace is in FORCED_IN_MEMORY_CACHE_NAMESPACES', () => {
|
||||
cacheConfig.USE_REDIS = true;
|
||||
cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES = ['forced-memory'];
|
||||
const namespace = 'forced-memory';
|
||||
const ttl = 3600;
|
||||
|
||||
standardCache(namespace, ttl);
|
||||
|
||||
expect(require('@keyv/redis').default).not.toHaveBeenCalled();
|
||||
expect(mockKeyv).toHaveBeenCalledWith({ namespace, ttl });
|
||||
});
|
||||
|
||||
it('should use Redis when namespace is not in FORCED_IN_MEMORY_CACHE_NAMESPACES', () => {
|
||||
cacheConfig.USE_REDIS = true;
|
||||
cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES = ['other-namespace'];
|
||||
const namespace = 'test-namespace';
|
||||
const ttl = 3600;
|
||||
|
||||
standardCache(namespace, ttl);
|
||||
|
||||
expect(require('@keyv/redis').default).toHaveBeenCalledWith(mockKeyvRedisClient);
|
||||
expect(mockKeyv).toHaveBeenCalledWith(mockKeyvRedis, { namespace, ttl });
|
||||
});
|
||||
|
||||
it('should throw error when Redis cache creation fails', () => {
|
||||
cacheConfig.USE_REDIS = true;
|
||||
const namespace = 'test-namespace';
|
||||
const ttl = 3600;
|
||||
const testError = new Error('Redis connection failed');
|
||||
|
||||
const KeyvRedis = require('@keyv/redis').default;
|
||||
KeyvRedis.mockImplementationOnce(() => {
|
||||
throw testError;
|
||||
});
|
||||
|
||||
expect(() => standardCache(namespace, ttl)).toThrow('Redis connection failed');
|
||||
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
expect(logger.error).toHaveBeenCalledWith(
|
||||
`Failed to create Redis cache for namespace ${namespace}:`,
|
||||
testError,
|
||||
);
|
||||
|
||||
expect(mockKeyv).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
  describe('violationCache', () => {
@@ -207,6 +267,86 @@ describe('cacheFactory', () => {
        checkPeriod: Time.ONE_DAY,
      });
    });

    it('should throw error when ConnectRedis constructor fails', () => {
      cacheConfig.USE_REDIS = true;
      const namespace = 'sessions';
      const ttl = 86400;

      // Mock ConnectRedis to throw an error during construction
      const redisError = new Error('Redis connection failed');
      mockConnectRedis.mockImplementationOnce(() => {
        throw redisError;
      });

      // The error should propagate up, not be caught
      expect(() => sessionCache(namespace, ttl)).toThrow('Redis connection failed');

      // Verify that MemoryStore was NOT used as fallback
      expect(mockMemoryStore).not.toHaveBeenCalled();
    });

    it('should register error handler but let errors propagate to Express', () => {
      cacheConfig.USE_REDIS = true;
      const namespace = 'sessions';

      // Create a mock session store with middleware methods
      const mockSessionStore = {
        get: jest.fn(),
        set: jest.fn(),
        destroy: jest.fn(),
      };
      mockConnectRedis.mockReturnValue(mockSessionStore);

      const store = sessionCache(namespace);

      // Verify error handler was registered
      expect(mockIoredisClient.on).toHaveBeenCalledWith('error', expect.any(Function));

      // Get the error handler
      const errorHandler = mockIoredisClient.on.mock.calls.find((call) => call[0] === 'error')[1];

      // Simulate an error from Redis during a session operation
      const redisError = new Error('Socket closed unexpectedly');

      // The error handler should log but not swallow the error
      const { logger } = require('@librechat/data-schemas');
      errorHandler(redisError);

      expect(logger.error).toHaveBeenCalledWith(
        `Session store Redis error for namespace ${namespace}::`,
        redisError,
      );

      // Now simulate what happens when session middleware tries to use the store
      const callback = jest.fn();
      mockSessionStore.get.mockImplementation((sid, cb) => {
        cb(new Error('Redis connection lost'));
      });

      // Call the store's get method (as Express session would)
      store.get('test-session-id', callback);

      // The error should be passed to the callback, not swallowed
      expect(callback).toHaveBeenCalledWith(new Error('Redis connection lost'));
    });

    it('should handle null ioredisClient gracefully', () => {
      cacheConfig.USE_REDIS = true;
      const namespace = 'sessions';

      // Temporarily set ioredisClient to null (simulating connection not established)
      const originalClient = require('./redisClients').ioredisClient;
      require('./redisClients').ioredisClient = null;

      // ConnectRedis might accept null client but would fail on first use
      // The important thing is it doesn't throw uncaught exceptions during construction
      const store = sessionCache(namespace);
      expect(store).toBeDefined();

      // Restore original client
      require('./redisClients').ioredisClient = originalClient;
    });
  });

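These sessionCache tests encode a deliberate policy: log Redis errors for observability, but never swallow them or fall back to MemoryStore, so Express sees the real failure. A rough sketch of the wiring they imply, assuming connect-redis's RedisStore export shape (hypothetical; the actual factory may differ):

// Sketch only: Redis-backed session store with a logging, non-swallowing
// error handler; store operation errors still reach Express via callbacks.
const session = require('express-session');
const { RedisStore } = require('connect-redis');

function sessionCacheSketch(namespace, ttl) {
  ioredisClient.on('error', (err) => {
    logger.error(`Session store Redis error for namespace ${namespace}::`, err);
  });
  return new RedisStore({ client: ioredisClient, prefix: `${namespace}:`, ttl });
}

// Usage: app.use(session({ store: sessionCacheSketch('sessions', 86400), secret, resave: false, saveUninitialized: false }));
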
  describe('limiterCache', () => {
@@ -248,8 +388,10 @@ describe('cacheFactory', () => {
      });
    });

    it('should pass sendCommand function that calls ioredisClient.call', () => {
    it('should pass sendCommand function that calls ioredisClient.call', async () => {
      cacheConfig.USE_REDIS = true;
      mockIoredisClient.call.mockResolvedValue('test-value');

      limiterCache('rate-limit');

      const sendCommandCall = mockRedisStore.mock.calls[0][0];
@@ -257,9 +399,29 @@ describe('cacheFactory', () => {

      // Test that sendCommand properly delegates to ioredisClient.call
      const args = ['GET', 'test-key'];
      sendCommand(...args);
      const result = await sendCommand(...args);

      expect(mockIoredisClient.call).toHaveBeenCalledWith(...args);
      expect(result).toBe('test-value');
    });

    it('should handle sendCommand errors properly', async () => {
      cacheConfig.USE_REDIS = true;

      // Mock the call method to reject with an error
      const testError = new Error('Redis error');
      mockIoredisClient.call.mockRejectedValue(testError);

      limiterCache('rate-limit');

      const sendCommandCall = mockRedisStore.mock.calls[0][0];
      const sendCommand = sendCommandCall.sendCommand;

      // Test that sendCommand properly handles errors
      const args = ['GET', 'test-key'];

      await expect(sendCommand(...args)).rejects.toThrow('Redis error');
      expect(mockIoredisClient.call).toHaveBeenCalledWith(...args);
    });

    it('should handle undefined prefix', () => {

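Both limiter tests exercise the sendCommand bridge: rate-limit-redis issues raw commands through a caller-supplied async function, and ioredis's generic call method is a direct fit. A sketch of that bridge (the import shape varies by rate-limit-redis version; treat the names here as assumptions):

// Sketch only: delegate raw Redis commands from the rate limiter to ioredis.
// `call` returns a promise, so both resolution and rejection flow straight
// back to the limiter, matching the two tests above.
const { RedisStore } = require('rate-limit-redis');

function limiterCacheSketch(prefix) {
  return new RedisStore({
    sendCommand: (...args) => ioredisClient.call(...args),
    prefix,
  });
}
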
api/cache/getLogStores.js (1 change, vendored)
@@ -33,6 +33,7 @@ const namespaces = {
  [CacheKeys.ROLES]: standardCache(CacheKeys.ROLES),
  [CacheKeys.MCP_TOOLS]: standardCache(CacheKeys.MCP_TOOLS),
  [CacheKeys.CONFIG_STORE]: standardCache(CacheKeys.CONFIG_STORE),
  [CacheKeys.STATIC_CONFIG]: standardCache(CacheKeys.STATIC_CONFIG),
  [CacheKeys.PENDING_REQ]: standardCache(CacheKeys.PENDING_REQ),
  [CacheKeys.ENCODED_DOMAINS]: new Keyv({ store: keyvMongo, namespace: CacheKeys.ENCODED_DOMAINS }),
  [CacheKeys.ABORT_KEYS]: standardCache(CacheKeys.ABORT_KEYS, Time.TEN_MINUTES),

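The namespaces map is the whole registry; a lookup accessor in the shape this file conventionally exposes would be little more than the following (inferred for illustration, not shown in this hunk):

// Hypothetical accessor: resolve the cache for a known key, failing loudly on
// unknown namespaces so misconfigured callers surface early.
function getLogStoresSketch(key) {
  const store = namespaces[key];
  if (!store) {
    throw new Error(`Invalid cache key: ${key}`);
  }
  return store;
}
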
api/cache/redisClients.js (173 changes, vendored)
@@ -1,6 +1,7 @@
const IoRedis = require('ioredis');
const { cacheConfig } = require('./cacheConfig');
const { logger } = require('@librechat/data-schemas');
const { createClient, createCluster } = require('@keyv/redis');
const { cacheConfig } = require('./cacheConfig');

const GLOBAL_PREFIX_SEPARATOR = '::';

@@ -12,31 +13,136 @@ const ca = cacheConfig.REDIS_CA;
/** @type {import('ioredis').Redis | import('ioredis').Cluster | null} */
let ioredisClient = null;
if (cacheConfig.USE_REDIS) {
  /** @type {import('ioredis').RedisOptions | import('ioredis').ClusterOptions} */
  const redisOptions = {
    username: username,
    password: password,
    tls: ca ? { ca } : undefined,
    keyPrefix: `${cacheConfig.REDIS_KEY_PREFIX}${GLOBAL_PREFIX_SEPARATOR}`,
    maxListeners: cacheConfig.REDIS_MAX_LISTENERS,
    retryStrategy: (times) => {
      if (
        cacheConfig.REDIS_RETRY_MAX_ATTEMPTS > 0 &&
        times > cacheConfig.REDIS_RETRY_MAX_ATTEMPTS
      ) {
        logger.error(
          `ioredis giving up after ${cacheConfig.REDIS_RETRY_MAX_ATTEMPTS} reconnection attempts`,
        );
        return null;
      }
      const delay = Math.min(times * 50, cacheConfig.REDIS_RETRY_MAX_DELAY);
      logger.info(`ioredis reconnecting... attempt ${times}, delay ${delay}ms`);
      return delay;
    },
    reconnectOnError: (err) => {
      const targetError = 'READONLY';
      if (err.message.includes(targetError)) {
        logger.warn('ioredis reconnecting due to READONLY error');
        return true;
      }
      return false;
    },
    enableOfflineQueue: cacheConfig.REDIS_ENABLE_OFFLINE_QUEUE,
    connectTimeout: cacheConfig.REDIS_CONNECT_TIMEOUT,
    maxRetriesPerRequest: 3,
  };

  ioredisClient =
    urls.length === 1
      ? new IoRedis(cacheConfig.REDIS_URI, redisOptions)
      : new IoRedis.Cluster(cacheConfig.REDIS_URI, { redisOptions });
      : new IoRedis.Cluster(cacheConfig.REDIS_URI, {
          redisOptions,
          clusterRetryStrategy: (times) => {
            if (
              cacheConfig.REDIS_RETRY_MAX_ATTEMPTS > 0 &&
              times > cacheConfig.REDIS_RETRY_MAX_ATTEMPTS
            ) {
              logger.error(
                `ioredis cluster giving up after ${cacheConfig.REDIS_RETRY_MAX_ATTEMPTS} reconnection attempts`,
              );
              return null;
            }
            const delay = Math.min(times * 100, cacheConfig.REDIS_RETRY_MAX_DELAY);
            logger.info(`ioredis cluster reconnecting... attempt ${times}, delay ${delay}ms`);
            return delay;
          },
          enableOfflineQueue: cacheConfig.REDIS_ENABLE_OFFLINE_QUEUE,
        });

  // Pinging the Redis server every 5 minutes to keep the connection alive
  const pingInterval = setInterval(() => ioredisClient.ping(), 5 * 60 * 1000);
  ioredisClient.on('close', () => clearInterval(pingInterval));
  ioredisClient.on('end', () => clearInterval(pingInterval));
  ioredisClient.on('error', (err) => {
    logger.error('ioredis client error:', err);
  });

  ioredisClient.on('connect', () => {
    logger.info('ioredis client connected');
  });

  ioredisClient.on('ready', () => {
    logger.info('ioredis client ready');
  });

  ioredisClient.on('reconnecting', (delay) => {
    logger.info(`ioredis client reconnecting in ${delay}ms`);
  });

  ioredisClient.on('close', () => {
    logger.warn('ioredis client connection closed');
  });

  /** Ping Interval to keep the Redis server connection alive (if enabled) */
  let pingInterval = null;
  const clearPingInterval = () => {
    if (pingInterval) {
      clearInterval(pingInterval);
      pingInterval = null;
    }
  };

  if (cacheConfig.REDIS_PING_INTERVAL > 0) {
    pingInterval = setInterval(() => {
      if (ioredisClient && ioredisClient.status === 'ready') {
        ioredisClient.ping().catch((err) => {
          logger.error('ioredis ping failed:', err);
        });
      }
    }, cacheConfig.REDIS_PING_INTERVAL * 1000);
    ioredisClient.on('close', clearPingInterval);
    ioredisClient.on('end', clearPingInterval);
  }
}

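The retryStrategy above is capped linear backoff: 50 ms per attempt for the standalone client (100 ms for the cluster), bounded by REDIS_RETRY_MAX_DELAY, with null as the "stop reconnecting" signal once REDIS_RETRY_MAX_ATTEMPTS is exceeded. A quick illustration of the schedule, assuming a 2000 ms cap for the example:

// Illustration only: delay progression for the standalone retryStrategy.
const REDIS_RETRY_MAX_DELAY = 2000; // example value
for (let times = 1; times <= 45; times++) {
  const delay = Math.min(times * 50, REDIS_RETRY_MAX_DELAY);
  console.log(`attempt ${times}: retry in ${delay}ms`); // 50, 100, 150, ... capped at 2000
}
// Returning null (once past the attempt cap) tells ioredis to stop reconnecting entirely.
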
/** @type {import('@keyv/redis').RedisClient | import('@keyv/redis').RedisCluster | null} */
let keyvRedisClient = null;
if (cacheConfig.USE_REDIS) {
  // ** WARNING ** Keyv Redis client does not support Prefix like ioredis above.
  // The prefix feature will be handled by the Keyv-Redis store in cacheFactory.js
  const redisOptions = { username, password, socket: { tls: ca != null, ca } };
  /**
   * ** WARNING ** Keyv Redis client does not support Prefix like ioredis above.
   * The prefix feature will be handled by the Keyv-Redis store in cacheFactory.js
   * @type {import('@keyv/redis').RedisClientOptions | import('@keyv/redis').RedisClusterOptions}
   */
  const redisOptions = {
    username,
    password,
    socket: {
      tls: ca != null,
      ca,
      connectTimeout: cacheConfig.REDIS_CONNECT_TIMEOUT,
      reconnectStrategy: (retries) => {
        if (
          cacheConfig.REDIS_RETRY_MAX_ATTEMPTS > 0 &&
          retries > cacheConfig.REDIS_RETRY_MAX_ATTEMPTS
        ) {
          logger.error(
            `@keyv/redis client giving up after ${cacheConfig.REDIS_RETRY_MAX_ATTEMPTS} reconnection attempts`,
          );
          return new Error('Max reconnection attempts reached');
        }
        const delay = Math.min(retries * 100, cacheConfig.REDIS_RETRY_MAX_DELAY);
        logger.info(`@keyv/redis reconnecting... attempt ${retries}, delay ${delay}ms`);
        return delay;
      },
    },
    disableOfflineQueue: !cacheConfig.REDIS_ENABLE_OFFLINE_QUEUE,
  };

  keyvRedisClient =
    urls.length === 1
@@ -48,10 +154,51 @@ if (cacheConfig.USE_REDIS) {

  keyvRedisClient.setMaxListeners(cacheConfig.REDIS_MAX_LISTENERS);

  // Pinging the Redis server every 5 minutes to keep the connection alive
  const keyvPingInterval = setInterval(() => keyvRedisClient.ping(), 5 * 60 * 1000);
  keyvRedisClient.on('disconnect', () => clearInterval(keyvPingInterval));
  keyvRedisClient.on('end', () => clearInterval(keyvPingInterval));
  keyvRedisClient.on('error', (err) => {
    logger.error('@keyv/redis client error:', err);
  });

  keyvRedisClient.on('connect', () => {
    logger.info('@keyv/redis client connected');
  });

  keyvRedisClient.on('ready', () => {
    logger.info('@keyv/redis client ready');
  });

  keyvRedisClient.on('reconnecting', () => {
    logger.info('@keyv/redis client reconnecting...');
  });

  keyvRedisClient.on('disconnect', () => {
    logger.warn('@keyv/redis client disconnected');
  });

  keyvRedisClient.connect().catch((err) => {
    logger.error('@keyv/redis initial connection failed:', err);
    throw err;
  });

  /** Ping Interval to keep the Redis server connection alive (if enabled) */
  let pingInterval = null;
  const clearPingInterval = () => {
    if (pingInterval) {
      clearInterval(pingInterval);
      pingInterval = null;
    }
  };

  if (cacheConfig.REDIS_PING_INTERVAL > 0) {
    pingInterval = setInterval(() => {
      if (keyvRedisClient && keyvRedisClient.isReady) {
        keyvRedisClient.ping().catch((err) => {
          logger.error('@keyv/redis ping failed:', err);
        });
      }
    }, cacheConfig.REDIS_PING_INTERVAL * 1000);
    keyvRedisClient.on('disconnect', clearPingInterval);
    keyvRedisClient.on('end', clearPingInterval);
  }
}

module.exports = { ioredisClient, keyvRedisClient, GLOBAL_PREFIX_SEPARATOR };

@@ -1,11 +1,34 @@
require('dotenv').config();
const { isEnabled } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');

const mongoose = require('mongoose');
const MONGO_URI = process.env.MONGO_URI;

if (!MONGO_URI) {
  throw new Error('Please define the MONGO_URI environment variable');
}
/** The maximum number of connections in the connection pool. */
const maxPoolSize = parseInt(process.env.MONGO_MAX_POOL_SIZE) || undefined;
/** The minimum number of connections in the connection pool. */
const minPoolSize = parseInt(process.env.MONGO_MIN_POOL_SIZE) || undefined;
/** The maximum number of connections that may be in the process of being established concurrently by the connection pool. */
const maxConnecting = parseInt(process.env.MONGO_MAX_CONNECTING) || undefined;
/** The maximum number of milliseconds that a connection can remain idle in the pool before being removed and closed. */
const maxIdleTimeMS = parseInt(process.env.MONGO_MAX_IDLE_TIME_MS) || undefined;
/** The maximum time in milliseconds that a thread can wait for a connection to become available. */
const waitQueueTimeoutMS = parseInt(process.env.MONGO_WAIT_QUEUE_TIMEOUT_MS) || undefined;
/** Set to false to disable automatic index creation for all models associated with this connection. */
const autoIndex =
  process.env.MONGO_AUTO_INDEX != undefined
    ? isEnabled(process.env.MONGO_AUTO_INDEX) || false
    : undefined;

/** Set to `false` to disable Mongoose automatically calling `createCollection()` on every model created on this connection. */
const autoCreate =
  process.env.MONGO_AUTO_CREATE != undefined
    ? isEnabled(process.env.MONGO_AUTO_CREATE) || false
    : undefined;
/**
 * Global is used here to maintain a cached connection across hot reloads
 * in development. This prevents connections growing exponentially
@@ -26,13 +49,21 @@ async function connectDb() {
  if (!cached.promise || disconnected) {
    const opts = {
      bufferCommands: false,
      ...(maxPoolSize ? { maxPoolSize } : {}),
      ...(minPoolSize ? { minPoolSize } : {}),
      ...(maxConnecting ? { maxConnecting } : {}),
      ...(maxIdleTimeMS ? { maxIdleTimeMS } : {}),
      ...(waitQueueTimeoutMS ? { waitQueueTimeoutMS } : {}),
      ...(autoIndex != undefined ? { autoIndex } : {}),
      ...(autoCreate != undefined ? { autoCreate } : {}),
      // useNewUrlParser: true,
      // useUnifiedTopology: true,
      // bufferMaxEntries: 0,
      // useFindAndModify: true,
      // useCreateIndex: true
    };

    logger.info('Mongo Connection options');
    logger.info(JSON.stringify(opts, null, 2));
    mongoose.set('strictQuery', true);
    cached.promise = mongoose.connect(MONGO_URI, opts).then((mongoose) => {
      return mongoose;

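Each option is spread into opts only when its environment variable parses to a truthy number, so a deployment configures exactly the subset it sets; note that `parseInt(x) || undefined` also maps an explicit 0 (or a non-numeric value) to undefined, so zero cannot be configured this way. For example, with only MONGO_MAX_POOL_SIZE=100 and MONGO_MAX_IDLE_TIME_MS=60000 set (illustrative values), the assembled options reduce to:

// Illustrative result of the option assembly above for a partial configuration.
const opts = {
  bufferCommands: false,
  maxPoolSize: 100,     // from MONGO_MAX_POOL_SIZE=100
  maxIdleTimeMS: 60000, // from MONGO_MAX_IDLE_TIME_MS=60000
};
// await mongoose.connect(process.env.MONGO_URI, opts);
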
@@ -1,20 +1,17 @@
const mongoose = require('mongoose');
const crypto = require('node:crypto');
const { logger } = require('@librechat/data-schemas');
const { SystemRoles, Tools, actionDelimiter } = require('librechat-data-provider');
const { ResourceType, SystemRoles, Tools, actionDelimiter } = require('librechat-data-provider');
const { GLOBAL_PROJECT_NAME, EPHEMERAL_AGENT_ID, mcp_delimiter } =
  require('librechat-data-provider').Constants;
// Default category value for new agents
const {
  getProjectByName,
  addAgentIdsToProject,
  removeAgentIdsFromProject,
  removeAgentFromAllProjects,
  removeAgentIdsFromProject,
  addAgentIdsToProject,
  getProjectByName,
} = require('./Project');
const { removeAllPermissions } = require('~/server/services/PermissionService');
const { getCachedTools } = require('~/server/services/Config');

// Category values are now imported from shared constants
// Schema fields (category, support_contact, is_promoted) are defined in @librechat/data-schemas
const { getActions } = require('./Action');
const { Agent } = require('~/db/models');

@@ -65,7 +62,7 @@ const getAgent = async (searchParameter) => await Agent.findOne(searchParameter)
const loadEphemeralAgent = async ({ req, agent_id, endpoint, model_parameters: _m }) => {
  const { model, ...model_parameters } = _m;
  /** @type {Record<string, FunctionTool>} */
  const availableTools = await getCachedTools({ includeGlobal: true });
  const availableTools = await getCachedTools({ userId: req.user.id, includeGlobal: true });
  /** @type {TEphemeralAgent | null} */
  const ephemeralAgent = req.body.ephemeralAgent;
  const mcpServers = new Set(ephemeralAgent?.mcp);
@@ -367,17 +364,10 @@ const updateAgent = async (searchParameter, updateData, options = {}) => {
  if (shouldCreateVersion) {
    const duplicateVersion = isDuplicateVersion(updateData, versionData, versions, actionsHash);
    if (duplicateVersion && !forceVersion) {
      const error = new Error(
        'Duplicate version: This would create a version identical to an existing one',
      );
      error.statusCode = 409;
      error.details = {
        duplicateVersion,
        versionIndex: versions.findIndex(
          (v) => JSON.stringify(duplicateVersion) === JSON.stringify(v),
        ),
      };
      throw error;
      // No changes detected, return the current agent without creating a new version
      const agentObj = currentAgent.toObject();
      agentObj.version = versions.length;
      return agentObj;
    }
  }

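The behavioral change in this hunk: a no-op update used to reject with a 409 "Duplicate version" error and now resolves with the current agent, reporting the latest existing version number. From a caller's perspective (hypothetical agentId):

// Caller-side view of the new contract: applying the same payload twice
// resolves both times, and the second call leaves version history unchanged.
const first = await updateAgent({ id: agentId }, { description: 'same text' });
const second = await updateAgent({ id: agentId }, { description: 'same text' });
console.assert(second.versions.length === first.versions.length);
console.assert(second.version === second.versions.length);
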
@@ -516,6 +506,10 @@ const deleteAgent = async (searchParameter) => {
  const agent = await Agent.findOneAndDelete(searchParameter);
  if (agent) {
    await removeAgentFromAllProjects(agent.id);
    await removeAllPermissions({
      resourceType: ResourceType.AGENT,
      resourceId: agent._id,
    });
  }
  return agent;
};
@@ -539,11 +533,7 @@ const getListAgentsByAccess = async ({
  const normalizedLimit = isPaginated ? Math.min(Math.max(1, parseInt(limit) || 20), 100) : null;

  // Build base query combining ACL accessible agents with other filters
  const baseQuery = { ...otherParams };

  if (accessibleIds.length > 0) {
    baseQuery._id = { $in: accessibleIds };
  }
  const baseQuery = { ...otherParams, _id: { $in: accessibleIds } };

  // Add cursor condition
  if (after) {

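The query simplification doubles as a security fix: the old branch dropped the _id constraint whenever accessibleIds was empty, turning "no accessible agents" into "no restriction". With the unconditional $in, an empty set matches nothing:

// Sketch: an empty accessible set now yields an empty result, never a
// fall-through to every agent matching the other filters.
const accessibleIds = []; // user owns nothing and nothing is shared with them
const baseQuery = { ...otherParams, _id: { $in: accessibleIds } };
// Agent.find(baseQuery) -> [] (MongoDB's $in with an empty array matches no documents)
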
@@ -14,6 +14,7 @@ const mongoose = require('mongoose');
const { v4: uuidv4 } = require('uuid');
const { agentSchema } = require('@librechat/data-schemas');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { AccessRoleIds, ResourceType, PrincipalType } = require('librechat-data-provider');
const {
  getAgent,
  loadAgent,
@@ -21,13 +22,16 @@ const {
  updateAgent,
  deleteAgent,
  getListAgents,
  getListAgentsByAccess,
  revertAgentVersion,
  updateAgentProjects,
  addAgentResourceFile,
  removeAgentResourceFiles,
  generateActionMetadataHash,
  revertAgentVersion,
} = require('./Agent');
const permissionService = require('~/server/services/PermissionService');
const { getCachedTools } = require('~/server/services/Config');
const { AclEntry } = require('~/db/models');

/**
 * @type {import('mongoose').Model<import('@librechat/data-schemas').IAgent>}
@@ -407,12 +411,26 @@ describe('models/Agent', () => {

describe('Agent CRUD Operations', () => {
  let mongoServer;
  let AccessRole;

  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
    const mongoUri = mongoServer.getUri();
    Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
    await mongoose.connect(mongoUri);

    // Initialize models
    const dbModels = require('~/db/models');
    AccessRole = dbModels.AccessRole;

    // Create necessary access roles for agents
    await AccessRole.create({
      accessRoleId: AccessRoleIds.AGENT_OWNER,
      name: 'Owner',
      description: 'Full control over agents',
      resourceType: ResourceType.AGENT,
      permBits: 15, // VIEW | EDIT | DELETE | SHARE
    });
  }, 20000);

  afterAll(async () => {
@@ -468,6 +486,51 @@ describe('models/Agent', () => {
    expect(agentAfterDelete).toBeNull();
  });

  test('should remove ACL entries when deleting an agent', async () => {
    const agentId = `agent_${uuidv4()}`;
    const authorId = new mongoose.Types.ObjectId();

    // Create agent
    const agent = await createAgent({
      id: agentId,
      name: 'Agent With Permissions',
      provider: 'test',
      model: 'test-model',
      author: authorId,
    });

    // Grant permissions (simulating sharing)
    await permissionService.grantPermission({
      principalType: PrincipalType.USER,
      principalId: authorId,
      resourceType: ResourceType.AGENT,
      resourceId: agent._id,
      accessRoleId: AccessRoleIds.AGENT_OWNER,
      grantedBy: authorId,
    });

    // Verify ACL entry exists
    const aclEntriesBefore = await AclEntry.find({
      resourceType: ResourceType.AGENT,
      resourceId: agent._id,
    });
    expect(aclEntriesBefore).toHaveLength(1);

    // Delete the agent
    await deleteAgent({ id: agentId });

    // Verify agent is deleted
    const agentAfterDelete = await getAgent({ id: agentId });
    expect(agentAfterDelete).toBeNull();

    // Verify ACL entries are removed
    const aclEntriesAfter = await AclEntry.find({
      resourceType: ResourceType.AGENT,
      resourceId: agent._id,
    });
    expect(aclEntriesAfter).toHaveLength(0);
  });

  test('should list agents by author', async () => {
    const authorId = new mongoose.Types.ObjectId();
    const otherAuthorId = new mongoose.Types.ObjectId();
@@ -879,45 +942,31 @@ describe('models/Agent', () => {
    expect(emptyParamsAgent.model_parameters).toEqual({});
  });

  test('should detect duplicate versions and reject updates', async () => {
    const originalConsoleError = console.error;
    console.error = jest.fn();
  test('should not create new version for duplicate updates', async () => {
    const authorId = new mongoose.Types.ObjectId();
    const testCases = generateVersionTestCases();

    try {
      const authorId = new mongoose.Types.ObjectId();
      const testCases = generateVersionTestCases();
    for (const testCase of testCases) {
      const testAgentId = `agent_${uuidv4()}`;

      for (const testCase of testCases) {
        const testAgentId = `agent_${uuidv4()}`;
      await createAgent({
        id: testAgentId,
        provider: 'test',
        model: 'test-model',
        author: authorId,
        ...testCase.initial,
      });

        await createAgent({
          id: testAgentId,
          provider: 'test',
          model: 'test-model',
          author: authorId,
          ...testCase.initial,
        });
      const updatedAgent = await updateAgent({ id: testAgentId }, testCase.update);
      expect(updatedAgent.versions).toHaveLength(2); // Initial update creates a second version

        await updateAgent({ id: testAgentId }, testCase.update);
      // Update with duplicate data should succeed but not create a new version
      const duplicateUpdate = await updateAgent({ id: testAgentId }, testCase.duplicate);

        let error;
        try {
          await updateAgent({ id: testAgentId }, testCase.duplicate);
        } catch (e) {
          error = e;
        }
      expect(duplicateUpdate.versions).toHaveLength(2); // No new version created

        expect(error).toBeDefined();
        expect(error.message).toContain('Duplicate version');
        expect(error.statusCode).toBe(409);
        expect(error.details).toBeDefined();
        expect(error.details.duplicateVersion).toBeDefined();

        const agent = await getAgent({ id: testAgentId });
        expect(agent.versions).toHaveLength(2);
      }
    } finally {
      console.error = originalConsoleError;
      const agent = await getAgent({ id: testAgentId });
      expect(agent.versions).toHaveLength(2);
    }
  });

@@ -1093,20 +1142,13 @@ describe('models/Agent', () => {
    expect(secondUpdate.versions).toHaveLength(3);

    // Update without forceVersion and no changes should not create a version
    let error;
    try {
      await updateAgent(
        { id: agentId },
        { tools: ['listEvents_action_test.com', 'createEvent_action_test.com'] },
        { updatingUserId: authorId.toString(), forceVersion: false },
      );
    } catch (e) {
      error = e;
    }
    const duplicateUpdate = await updateAgent(
      { id: agentId },
      { tools: ['listEvents_action_test.com', 'createEvent_action_test.com'] },
      { updatingUserId: authorId.toString(), forceVersion: false },
    );

    expect(error).toBeDefined();
    expect(error.message).toContain('Duplicate version');
    expect(error.statusCode).toBe(409);
    expect(duplicateUpdate.versions).toHaveLength(3); // No new version created
  });

  test('should handle isDuplicateVersion with arrays containing null/undefined values', async () => {
@@ -1304,18 +1346,21 @@ describe('models/Agent', () => {
    expect(secondUpdate.versions).toHaveLength(3);
    expect(secondUpdate.support_contact.email).toBe('updated@support.com');

    // Try to update with same support_contact - should be detected as duplicate
    await expect(
      updateAgent(
        { id: agentId },
        {
          support_contact: {
            name: 'Updated Support',
            email: 'updated@support.com',
          },
    // Try to update with same support_contact - should be detected as duplicate but return successfully
    const duplicateUpdate = await updateAgent(
      { id: agentId },
      {
        support_contact: {
          name: 'Updated Support',
          email: 'updated@support.com',
        },
      ),
    ).rejects.toThrow('Duplicate version');
      },
    );

    // Should not create a new version
    expect(duplicateUpdate.versions).toHaveLength(3);
    expect(duplicateUpdate.version).toBe(3);
    expect(duplicateUpdate.support_contact.email).toBe('updated@support.com');
  });

  test('should handle support_contact from empty to populated', async () => {
@@ -1499,18 +1544,22 @@ describe('models/Agent', () => {
    expect(updated.support_contact.name).toBe('New Name');
    expect(updated.support_contact.email).toBe('');

    // Verify isDuplicateVersion works with partial changes
    await expect(
      updateAgent(
        { id: agentId },
        {
          support_contact: {
            name: 'New Name',
            email: '',
          },
    // Verify isDuplicateVersion works with partial changes - should return successfully without creating new version
    const duplicateUpdate = await updateAgent(
      { id: agentId },
      {
        support_contact: {
          name: 'New Name',
          email: '',
        },
      ),
    ).rejects.toThrow('Duplicate version');
      },
    );

    // Should not create a new version since content is the same
    expect(duplicateUpdate.versions).toHaveLength(2);
    expect(duplicateUpdate.version).toBe(2);
    expect(duplicateUpdate.support_contact.name).toBe('New Name');
    expect(duplicateUpdate.support_contact.email).toBe('');
  });

  // Edge Cases
@@ -2730,11 +2779,18 @@ describe('models/Agent', () => {
      agent_ids: ['agent1', 'agent2'],
    });

    await updateAgent({ id: agentId }, { agent_ids: ['agent1', 'agent2', 'agent3'] });
    const updatedAgent = await updateAgent(
      { id: agentId },
      { agent_ids: ['agent1', 'agent2', 'agent3'] },
    );
    expect(updatedAgent.versions).toHaveLength(2);

    await expect(
      updateAgent({ id: agentId }, { agent_ids: ['agent1', 'agent2', 'agent3'] }),
    ).rejects.toThrow('Duplicate version');
    // Update with same agent_ids should succeed but not create a new version
    const duplicateUpdate = await updateAgent(
      { id: agentId },
      { agent_ids: ['agent1', 'agent2', 'agent3'] },
    );
    expect(duplicateUpdate.versions).toHaveLength(2); // No new version created
  });

  test('should handle agent_ids field alongside other fields', async () => {
@@ -2873,9 +2929,10 @@ describe('models/Agent', () => {
    expect(updated.versions).toHaveLength(2);
    expect(updated.agent_ids).toEqual([]);

    await expect(updateAgent({ id: agentId }, { agent_ids: [] })).rejects.toThrow(
      'Duplicate version',
    );
    // Update with same empty agent_ids should succeed but not create a new version
    const duplicateUpdate = await updateAgent({ id: agentId }, { agent_ids: [] });
    expect(duplicateUpdate.versions).toHaveLength(2); // No new version created
    expect(duplicateUpdate.agent_ids).toEqual([]);
  });

  test('should handle agent without agent_ids field', async () => {
@@ -2985,6 +3042,212 @@ describe('Support Contact Field', () => {
    // Verify support_contact is undefined when not provided
    expect(agent.support_contact).toBeUndefined();
  });

  describe('getListAgentsByAccess - Security Tests', () => {
    let userA, userB;
    let agentA1, agentA2, agentA3;

    beforeEach(async () => {
      Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
      await Agent.deleteMany({});
      await AclEntry.deleteMany({});

      // Create two users
      userA = new mongoose.Types.ObjectId();
      userB = new mongoose.Types.ObjectId();

      // Create agents for user A
      agentA1 = await createAgent({
        id: `agent_${uuidv4().slice(0, 12)}`,
        name: 'Agent A1',
        description: 'User A agent 1',
        provider: 'openai',
        model: 'gpt-4',
        author: userA,
      });

      agentA2 = await createAgent({
        id: `agent_${uuidv4().slice(0, 12)}`,
        name: 'Agent A2',
        description: 'User A agent 2',
        provider: 'openai',
        model: 'gpt-4',
        author: userA,
      });

      agentA3 = await createAgent({
        id: `agent_${uuidv4().slice(0, 12)}`,
        name: 'Agent A3',
        description: 'User A agent 3',
        provider: 'openai',
        model: 'gpt-4',
        author: userA,
      });
    });

    test('should return empty list when user has no accessible agents (empty accessibleIds)', async () => {
      // User B has no agents and no shared agents
      const result = await getListAgentsByAccess({
        accessibleIds: [],
        otherParams: {},
      });

      expect(result.data).toHaveLength(0);
      expect(result.has_more).toBe(false);
      expect(result.first_id).toBeNull();
      expect(result.last_id).toBeNull();
    });

    test('should not return other users agents when accessibleIds is empty', async () => {
      // User B trying to list agents with empty accessibleIds should not see User A's agents
      const result = await getListAgentsByAccess({
        accessibleIds: [],
        otherParams: { author: userB },
      });

      expect(result.data).toHaveLength(0);
      expect(result.has_more).toBe(false);
    });

    test('should only return agents in accessibleIds list', async () => {
      // Give User B access to only one of User A's agents
      const accessibleIds = [agentA1._id];

      const result = await getListAgentsByAccess({
        accessibleIds,
        otherParams: {},
      });

      expect(result.data).toHaveLength(1);
      expect(result.data[0].id).toBe(agentA1.id);
      expect(result.data[0].name).toBe('Agent A1');
    });

    test('should return multiple accessible agents when provided', async () => {
      // Give User B access to two of User A's agents
      const accessibleIds = [agentA1._id, agentA3._id];

      const result = await getListAgentsByAccess({
        accessibleIds,
        otherParams: {},
      });

      expect(result.data).toHaveLength(2);
      const returnedIds = result.data.map((agent) => agent.id);
      expect(returnedIds).toContain(agentA1.id);
      expect(returnedIds).toContain(agentA3.id);
      expect(returnedIds).not.toContain(agentA2.id);
    });

    test('should respect other query parameters while enforcing accessibleIds', async () => {
      // Give access to all agents but filter by name
      const accessibleIds = [agentA1._id, agentA2._id, agentA3._id];

      const result = await getListAgentsByAccess({
        accessibleIds,
        otherParams: { name: 'Agent A2' },
      });

      expect(result.data).toHaveLength(1);
      expect(result.data[0].id).toBe(agentA2.id);
    });

    test('should handle pagination correctly with accessibleIds filter', async () => {
      // Create more agents
      const moreAgents = [];
      for (let i = 4; i <= 10; i++) {
        const agent = await createAgent({
          id: `agent_${uuidv4().slice(0, 12)}`,
          name: `Agent A${i}`,
          description: `User A agent ${i}`,
          provider: 'openai',
          model: 'gpt-4',
          author: userA,
        });
        moreAgents.push(agent);
      }

      // Give access to all agents
      const allAgentIds = [agentA1, agentA2, agentA3, ...moreAgents].map((a) => a._id);

      // First page
      const page1 = await getListAgentsByAccess({
        accessibleIds: allAgentIds,
        otherParams: {},
        limit: 5,
      });

      expect(page1.data).toHaveLength(5);
      expect(page1.has_more).toBe(true);
      expect(page1.after).toBeTruthy();

      // Second page
      const page2 = await getListAgentsByAccess({
        accessibleIds: allAgentIds,
        otherParams: {},
        limit: 5,
        after: page1.after,
      });

      expect(page2.data).toHaveLength(5);
      expect(page2.has_more).toBe(false);

      // Verify no overlap between pages
      const page1Ids = page1.data.map((a) => a.id);
      const page2Ids = page2.data.map((a) => a.id);
      const intersection = page1Ids.filter((id) => page2Ids.includes(id));
      expect(intersection).toHaveLength(0);
    });

    test('should return empty list when accessibleIds contains non-existent IDs', async () => {
      // Try with non-existent agent IDs
      const fakeIds = [new mongoose.Types.ObjectId(), new mongoose.Types.ObjectId()];

      const result = await getListAgentsByAccess({
        accessibleIds: fakeIds,
        otherParams: {},
      });

      expect(result.data).toHaveLength(0);
      expect(result.has_more).toBe(false);
    });

    test('should handle undefined accessibleIds as empty array', async () => {
      // When accessibleIds is undefined, it should be treated as empty array
      const result = await getListAgentsByAccess({
        accessibleIds: undefined,
        otherParams: {},
      });

      expect(result.data).toHaveLength(0);
      expect(result.has_more).toBe(false);
    });

    test('should combine accessibleIds with author filter correctly', async () => {
      // Create an agent for User B
      const agentB1 = await createAgent({
        id: `agent_${uuidv4().slice(0, 12)}`,
        name: 'Agent B1',
        description: 'User B agent 1',
        provider: 'openai',
        model: 'gpt-4',
        author: userB,
      });

      // Give User B access to one of User A's agents
      const accessibleIds = [agentA1._id, agentB1._id];

      // Filter by author should further restrict the results
      const result = await getListAgentsByAccess({
        accessibleIds,
        otherParams: { author: userB },
      });

      expect(result.data).toHaveLength(1);
      expect(result.data[0].id).toBe(agentB1.id);
      expect(result.data[0].author).toBe(userB.toString());
    });
  });
});

function createBasicAgent(overrides = {}) {

@@ -1,6 +1,6 @@
const { logger } = require('@librechat/data-schemas');
const { createTempChatExpirationDate } = require('@librechat/api');
const getCustomConfig = require('~/server/services/Config/getCustomConfig');
const { getCustomConfig } = require('~/server/services/Config/getCustomConfig');
const { getMessages, deleteMessages } = require('./Message');
const { Conversation } = require('~/db/models');

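The one-line fix switches from binding the module object to destructuring the named export; with `module.exports = { getCustomConfig }`, the old form made `getCustomConfig` the namespace object, so calling it would throw at runtime. In miniature:

// Illustration (assuming the module's export shape is: module.exports = { getCustomConfig };)
const wrong = require('~/server/services/Config/getCustomConfig');
// wrong(...) -> TypeError: wrong is not a function
const { getCustomConfig } = require('~/server/services/Config/getCustomConfig');
// getCustomConfig(...) -> callable as intended
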
api/models/Conversation.spec.js (572 lines, new file)
@@ -0,0 +1,572 @@
const mongoose = require('mongoose');
const { v4: uuidv4 } = require('uuid');
const { EModelEndpoint } = require('librechat-data-provider');
const { MongoMemoryServer } = require('mongodb-memory-server');
const {
  deleteNullOrEmptyConversations,
  searchConversation,
  getConvosByCursor,
  getConvosQueried,
  getConvoFiles,
  getConvoTitle,
  deleteConvos,
  saveConvo,
  getConvo,
} = require('./Conversation');
jest.mock('~/server/services/Config/getCustomConfig');
jest.mock('./Message');
const { getCustomConfig } = require('~/server/services/Config/getCustomConfig');
const { getMessages, deleteMessages } = require('./Message');

const { Conversation } = require('~/db/models');

describe('Conversation Operations', () => {
  let mongoServer;
  let mockReq;
  let mockConversationData;

  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
    const mongoUri = mongoServer.getUri();
    await mongoose.connect(mongoUri);
  });

  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
  });

  beforeEach(async () => {
    // Clear database
    await Conversation.deleteMany({});

    // Reset mocks
    jest.clearAllMocks();

    // Default mock implementations
    getMessages.mockResolvedValue([]);
    deleteMessages.mockResolvedValue({ deletedCount: 0 });

    mockReq = {
      user: { id: 'user123' },
      body: {},
    };

    mockConversationData = {
      conversationId: uuidv4(),
      title: 'Test Conversation',
      endpoint: EModelEndpoint.openAI,
    };
  });

  describe('saveConvo', () => {
    it('should save a conversation for an authenticated user', async () => {
      const result = await saveConvo(mockReq, mockConversationData);

      expect(result.conversationId).toBe(mockConversationData.conversationId);
      expect(result.user).toBe('user123');
      expect(result.title).toBe('Test Conversation');
      expect(result.endpoint).toBe(EModelEndpoint.openAI);

      // Verify the conversation was actually saved to the database
      const savedConvo = await Conversation.findOne({
        conversationId: mockConversationData.conversationId,
        user: 'user123',
      });
      expect(savedConvo).toBeTruthy();
      expect(savedConvo.title).toBe('Test Conversation');
    });

    it('should query messages when saving a conversation', async () => {
      // Mock messages as ObjectIds
      const mongoose = require('mongoose');
      const mockMessages = [new mongoose.Types.ObjectId(), new mongoose.Types.ObjectId()];
      getMessages.mockResolvedValue(mockMessages);

      await saveConvo(mockReq, mockConversationData);

      // Verify that getMessages was called with correct parameters
      expect(getMessages).toHaveBeenCalledWith(
        { conversationId: mockConversationData.conversationId },
        '_id',
      );
    });

    it('should handle newConversationId when provided', async () => {
      const newConversationId = uuidv4();
      const result = await saveConvo(mockReq, {
        ...mockConversationData,
        newConversationId,
      });

      expect(result.conversationId).toBe(newConversationId);
    });

    it('should handle unsetFields metadata', async () => {
      const metadata = {
        unsetFields: { someField: 1 },
      };

      await saveConvo(mockReq, mockConversationData, metadata);

      const savedConvo = await Conversation.findOne({
        conversationId: mockConversationData.conversationId,
      });
      expect(savedConvo.someField).toBeUndefined();
    });
  });

  describe('isTemporary conversation handling', () => {
    it('should save a conversation with expiredAt when isTemporary is true', async () => {
      // Mock custom config with 24 hour retention
      getCustomConfig.mockResolvedValue({
        interface: {
          temporaryChatRetention: 24,
        },
      });

      mockReq.body = { isTemporary: true };

      const beforeSave = new Date();
      const result = await saveConvo(mockReq, mockConversationData);
      const afterSave = new Date();

      expect(result.conversationId).toBe(mockConversationData.conversationId);
      expect(result.expiredAt).toBeDefined();
      expect(result.expiredAt).toBeInstanceOf(Date);

      // Verify expiredAt is approximately 24 hours in the future
      const expectedExpirationTime = new Date(beforeSave.getTime() + 24 * 60 * 60 * 1000);
      const actualExpirationTime = new Date(result.expiredAt);

      expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
        expectedExpirationTime.getTime() - 1000,
      );
      expect(actualExpirationTime.getTime()).toBeLessThanOrEqual(
        new Date(afterSave.getTime() + 24 * 60 * 60 * 1000 + 1000).getTime(),
      );
    });

    it('should save a conversation without expiredAt when isTemporary is false', async () => {
      mockReq.body = { isTemporary: false };

      const result = await saveConvo(mockReq, mockConversationData);

      expect(result.conversationId).toBe(mockConversationData.conversationId);
      expect(result.expiredAt).toBeNull();
    });

    it('should save a conversation without expiredAt when isTemporary is not provided', async () => {
      // No isTemporary in body
      mockReq.body = {};

      const result = await saveConvo(mockReq, mockConversationData);

      expect(result.conversationId).toBe(mockConversationData.conversationId);
      expect(result.expiredAt).toBeNull();
    });

    it('should use custom retention period from config', async () => {
      // Mock custom config with 48 hour retention
      getCustomConfig.mockResolvedValue({
        interface: {
          temporaryChatRetention: 48,
        },
      });

      mockReq.body = { isTemporary: true };

      const beforeSave = new Date();
      const result = await saveConvo(mockReq, mockConversationData);

      expect(result.expiredAt).toBeDefined();

      // Verify expiredAt is approximately 48 hours in the future
      const expectedExpirationTime = new Date(beforeSave.getTime() + 48 * 60 * 60 * 1000);
      const actualExpirationTime = new Date(result.expiredAt);

      expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
        expectedExpirationTime.getTime() - 1000,
      );
      expect(actualExpirationTime.getTime()).toBeLessThanOrEqual(
        expectedExpirationTime.getTime() + 1000,
      );
    });

    it('should handle minimum retention period (1 hour)', async () => {
      // Mock custom config with less than minimum retention
      getCustomConfig.mockResolvedValue({
        interface: {
          temporaryChatRetention: 0.5, // Half hour - should be clamped to 1 hour
        },
      });

      mockReq.body = { isTemporary: true };

      const beforeSave = new Date();
      const result = await saveConvo(mockReq, mockConversationData);

      expect(result.expiredAt).toBeDefined();

      // Verify expiredAt is approximately 1 hour in the future (minimum)
      const expectedExpirationTime = new Date(beforeSave.getTime() + 1 * 60 * 60 * 1000);
      const actualExpirationTime = new Date(result.expiredAt);

      expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
        expectedExpirationTime.getTime() - 1000,
      );
      expect(actualExpirationTime.getTime()).toBeLessThanOrEqual(
        expectedExpirationTime.getTime() + 1000,
      );
    });

    it('should handle maximum retention period (8760 hours)', async () => {
      // Mock custom config with more than maximum retention
      getCustomConfig.mockResolvedValue({
        interface: {
          temporaryChatRetention: 10000, // Should be clamped to 8760 hours
        },
      });

      mockReq.body = { isTemporary: true };

      const beforeSave = new Date();
      const result = await saveConvo(mockReq, mockConversationData);

      expect(result.expiredAt).toBeDefined();

      // Verify expiredAt is approximately 8760 hours (1 year) in the future
      const expectedExpirationTime = new Date(beforeSave.getTime() + 8760 * 60 * 60 * 1000);
      const actualExpirationTime = new Date(result.expiredAt);

      expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
        expectedExpirationTime.getTime() - 1000,
      );
      expect(actualExpirationTime.getTime()).toBeLessThanOrEqual(
        expectedExpirationTime.getTime() + 1000,
      );
    });

    it('should handle getCustomConfig errors gracefully', async () => {
      // Mock getCustomConfig to throw an error
      getCustomConfig.mockRejectedValue(new Error('Config service unavailable'));

      mockReq.body = { isTemporary: true };

      const result = await saveConvo(mockReq, mockConversationData);

      // Should still save the conversation but with expiredAt as null
      expect(result.conversationId).toBe(mockConversationData.conversationId);
      expect(result.expiredAt).toBeNull();
    });

    it('should use default retention when config is not provided', async () => {
      // Mock getCustomConfig to return empty config
      getCustomConfig.mockResolvedValue({});

      mockReq.body = { isTemporary: true };

      const beforeSave = new Date();
      const result = await saveConvo(mockReq, mockConversationData);

      expect(result.expiredAt).toBeDefined();

      // Default retention is 30 days (720 hours)
      const expectedExpirationTime = new Date(beforeSave.getTime() + 30 * 24 * 60 * 60 * 1000);
      const actualExpirationTime = new Date(result.expiredAt);

      expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
        expectedExpirationTime.getTime() - 1000,
      );
      expect(actualExpirationTime.getTime()).toBeLessThanOrEqual(
        expectedExpirationTime.getTime() + 1000,
      );
    });

    it('should update expiredAt when saving existing temporary conversation', async () => {
      // First save a temporary conversation
      getCustomConfig.mockResolvedValue({
        interface: {
          temporaryChatRetention: 24,
        },
      });

      mockReq.body = { isTemporary: true };
      const firstSave = await saveConvo(mockReq, mockConversationData);
      const originalExpiredAt = firstSave.expiredAt;

      // Wait a bit to ensure time difference
      await new Promise((resolve) => setTimeout(resolve, 100));

      // Save again with same conversationId but different title
      const updatedData = { ...mockConversationData, title: 'Updated Title' };
      const secondSave = await saveConvo(mockReq, updatedData);

      // Should update title and create new expiredAt
      expect(secondSave.title).toBe('Updated Title');
      expect(secondSave.expiredAt).toBeDefined();
      expect(new Date(secondSave.expiredAt).getTime()).toBeGreaterThan(
        new Date(originalExpiredAt).getTime(),
      );
    });

    it('should not set expiredAt when updating non-temporary conversation', async () => {
      // First save a non-temporary conversation
      mockReq.body = { isTemporary: false };
      const firstSave = await saveConvo(mockReq, mockConversationData);
      expect(firstSave.expiredAt).toBeNull();

      // Update without isTemporary flag
      mockReq.body = {};
      const updatedData = { ...mockConversationData, title: 'Updated Title' };
      const secondSave = await saveConvo(mockReq, updatedData);

      expect(secondSave.title).toBe('Updated Title');
      expect(secondSave.expiredAt).toBeNull();
    });

    it('should filter out expired conversations in getConvosByCursor', async () => {
      // Create some test conversations
      const nonExpiredConvo = await Conversation.create({
        conversationId: uuidv4(),
        user: 'user123',
        title: 'Non-expired',
        endpoint: EModelEndpoint.openAI,
        expiredAt: null,
        updatedAt: new Date(),
      });

      await Conversation.create({
        conversationId: uuidv4(),
        user: 'user123',
        title: 'Future expired',
        endpoint: EModelEndpoint.openAI,
        expiredAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // 24 hours from now
        updatedAt: new Date(),
      });

      // Mock Meili search
      Conversation.meiliSearch = jest.fn().mockResolvedValue({ hits: [] });

      const result = await getConvosByCursor('user123');

      // Should only return conversations with null or non-existent expiredAt
      expect(result.conversations).toHaveLength(1);
      expect(result.conversations[0].conversationId).toBe(nonExpiredConvo.conversationId);
    });

    it('should filter out expired conversations in getConvosQueried', async () => {
      // Create test conversations
      const nonExpiredConvo = await Conversation.create({
        conversationId: uuidv4(),
        user: 'user123',
        title: 'Non-expired',
        endpoint: EModelEndpoint.openAI,
        expiredAt: null,
      });

      const expiredConvo = await Conversation.create({
        conversationId: uuidv4(),
        user: 'user123',
        title: 'Expired',
        endpoint: EModelEndpoint.openAI,
        expiredAt: new Date(Date.now() + 24 * 60 * 60 * 1000),
      });

      const convoIds = [
        { conversationId: nonExpiredConvo.conversationId },
        { conversationId: expiredConvo.conversationId },
      ];

      const result = await getConvosQueried('user123', convoIds);

      // Should only return the non-expired conversation
      expect(result.conversations).toHaveLength(1);
      expect(result.conversations[0].conversationId).toBe(nonExpiredConvo.conversationId);
      expect(result.convoMap[nonExpiredConvo.conversationId]).toBeDefined();
      expect(result.convoMap[expiredConvo.conversationId]).toBeUndefined();
    });
  });

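The retention tests above collectively imply the contract of createTempChatExpirationDate: clamp the configured hours to [1, 8760], default to 720 hours (30 days) when unset, and let saveConvo null out expiredAt when config retrieval fails. A paraphrase of that contract (the real helper lives in @librechat/api; this is an assumption-based sketch, not its source):

// Sketch consistent with the expectations in the tests above.
function createTempChatExpirationDateSketch(config) {
  const hours = config?.interface?.temporaryChatRetention ?? 720; // 30-day default
  const clamped = Math.min(Math.max(hours, 1), 8760); // 1 hour .. 1 year
  return new Date(Date.now() + clamped * 60 * 60 * 1000);
}
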
describe('searchConversation', () => {
|
||||
it('should find a conversation by conversationId', async () => {
|
||||
await Conversation.create({
|
||||
conversationId: mockConversationData.conversationId,
|
||||
user: 'user123',
|
||||
title: 'Test',
|
||||
endpoint: EModelEndpoint.openAI,
|
||||
});
|
||||
|
||||
const result = await searchConversation(mockConversationData.conversationId);
|
||||
|
||||
expect(result).toBeTruthy();
|
||||
expect(result.conversationId).toBe(mockConversationData.conversationId);
|
||||
expect(result.user).toBe('user123');
|
||||
expect(result.title).toBeUndefined(); // Only returns conversationId and user
|
||||
});
|
||||
|
||||
it('should return null if conversation not found', async () => {
|
||||
const result = await searchConversation('non-existent-id');
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getConvo', () => {
|
||||
it('should retrieve a conversation for a user', async () => {
|
||||
await Conversation.create({
|
||||
conversationId: mockConversationData.conversationId,
|
||||
user: 'user123',
|
||||
title: 'Test Conversation',
|
||||
endpoint: EModelEndpoint.openAI,
|
||||
});
|
||||
|
||||
const result = await getConvo('user123', mockConversationData.conversationId);
|
||||
|
||||
expect(result.conversationId).toBe(mockConversationData.conversationId);
|
||||
expect(result.user).toBe('user123');
|
||||
expect(result.title).toBe('Test Conversation');
|
||||
});
|
||||
|
||||
    it('should return null if conversation not found', async () => {
      const result = await getConvo('user123', 'non-existent-id');
      expect(result).toBeNull();
    });
  });

  describe('getConvoTitle', () => {
    it('should return the conversation title', async () => {
      await Conversation.create({
        conversationId: mockConversationData.conversationId,
        user: 'user123',
        title: 'Test Title',
        endpoint: EModelEndpoint.openAI,
      });

      const result = await getConvoTitle('user123', mockConversationData.conversationId);
      expect(result).toBe('Test Title');
    });

    it('should return null if conversation has no title', async () => {
      await Conversation.create({
        conversationId: mockConversationData.conversationId,
        user: 'user123',
        title: null,
        endpoint: EModelEndpoint.openAI,
      });

      const result = await getConvoTitle('user123', mockConversationData.conversationId);
      expect(result).toBeNull();
    });

    it('should return "New Chat" if conversation not found', async () => {
      const result = await getConvoTitle('user123', 'non-existent-id');
      expect(result).toBe('New Chat');
    });
  });

  describe('getConvoFiles', () => {
    it('should return conversation files', async () => {
      const files = ['file1', 'file2'];
      await Conversation.create({
        conversationId: mockConversationData.conversationId,
        user: 'user123',
        endpoint: EModelEndpoint.openAI,
        files,
      });

      const result = await getConvoFiles(mockConversationData.conversationId);
      expect(result).toEqual(files);
    });

    it('should return empty array if no files', async () => {
      await Conversation.create({
        conversationId: mockConversationData.conversationId,
        user: 'user123',
        endpoint: EModelEndpoint.openAI,
      });

      const result = await getConvoFiles(mockConversationData.conversationId);
      expect(result).toEqual([]);
    });

    it('should return empty array if conversation not found', async () => {
      const result = await getConvoFiles('non-existent-id');
      expect(result).toEqual([]);
    });
  });

  describe('deleteConvos', () => {
    it('should delete conversations and associated messages', async () => {
      await Conversation.create({
        conversationId: mockConversationData.conversationId,
        user: 'user123',
        title: 'To Delete',
        endpoint: EModelEndpoint.openAI,
      });

      deleteMessages.mockResolvedValue({ deletedCount: 5 });

      const result = await deleteConvos('user123', {
        conversationId: mockConversationData.conversationId,
      });

      expect(result.deletedCount).toBe(1);
      expect(result.messages.deletedCount).toBe(5);
      expect(deleteMessages).toHaveBeenCalledWith({
        conversationId: { $in: [mockConversationData.conversationId] },
      });

      // Verify conversation was deleted
      const deletedConvo = await Conversation.findOne({
        conversationId: mockConversationData.conversationId,
      });
      expect(deletedConvo).toBeNull();
    });

    it('should throw error if no conversations found', async () => {
      await expect(deleteConvos('user123', { conversationId: 'non-existent' })).rejects.toThrow(
        'Conversation not found or already deleted.',
      );
    });
  });

  describe('deleteNullOrEmptyConversations', () => {
    it('should delete conversations with null, empty, or missing conversationIds', async () => {
      // Since conversationId is required by the schema, we can't create documents with null/missing IDs
      // This test should verify the function works when such documents exist (e.g., from data corruption)

      // For this test, let's create a valid conversation and verify the function doesn't delete it
      await Conversation.create({
        conversationId: mockConversationData.conversationId,
        user: 'user4',
        endpoint: EModelEndpoint.openAI,
      });

      deleteMessages.mockResolvedValue({ deletedCount: 0 });

      const result = await deleteNullOrEmptyConversations();

      expect(result.conversations.deletedCount).toBe(0); // No invalid conversations to delete
      expect(result.messages.deletedCount).toBe(0);

      // Verify valid conversation remains
      const remainingConvos = await Conversation.find({});
      expect(remainingConvos).toHaveLength(1);
      expect(remainingConvos[0].conversationId).toBe(mockConversationData.conversationId);
    });
  });

  describe('Error Handling', () => {
    it('should handle database errors in saveConvo', async () => {
      // Force a database error by disconnecting
      await mongoose.disconnect();

      const result = await saveConvo(mockReq, mockConversationData);

      expect(result).toEqual({ message: 'Error saving conversation' });

      // Reconnect for other tests
      await mongoose.connect(mongoServer.getUri());
    });
  });
});
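The deleteNullOrEmptyConversations test above can only exercise the happy path, since the schema requires a conversationId. As a rough sketch of the kind of filter such a cleanup would need for corrupted documents (the filter shape is an assumption, not taken from this diff):

// Hypothetical Mongo filter matching conversations whose conversationId is
// missing, null, or an empty string (e.g., left over from data corruption).
const invalidConvoFilter = {
  $or: [
    { conversationId: { $exists: false } },
    { conversationId: null },
    { conversationId: '' },
  ],
};
// A cleanup in this style would run Conversation.deleteMany(invalidConvoFilter)
// and then delete the matching messages, mirroring the result shape asserted above.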
@@ -1,17 +1,22 @@
const mongoose = require('mongoose');
const { v4: uuidv4 } = require('uuid');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { createModels } = require('@librechat/data-schemas');
const { getFiles, createFile } = require('./File');
const { createAgent } = require('./Agent');
const { MongoMemoryServer } = require('mongodb-memory-server');
const {
  SystemRoles,
  ResourceType,
  AccessRoleIds,
  PrincipalType,
} = require('librechat-data-provider');
const { grantPermission } = require('~/server/services/PermissionService');
const { getFiles, createFile } = require('./File');
const { seedDefaultRoles } = require('~/models');
const { createAgent } = require('./Agent');

let File;
let Agent;
let AclEntry;
let User;
let AccessRole;
let modelsToCleanup = [];

describe('File Access Control', () => {
@@ -36,7 +41,6 @@ describe('File Access Control', () => {
    Agent = dbModels.Agent;
    AclEntry = dbModels.AclEntry;
    User = dbModels.User;
    AccessRole = dbModels.AccessRole;

    // Seed default roles
    await seedDefaultRoles();
@@ -116,17 +120,22 @@ describe('File Access Control', () => {

      // Grant EDIT permission to user on the agent
      await grantPermission({
        principalType: 'user',
        principalType: PrincipalType.USER,
        principalId: userId,
        resourceType: 'agent',
        resourceType: ResourceType.AGENT,
        resourceId: agent._id,
        accessRoleId: 'agent_editor',
        accessRoleId: AccessRoleIds.AGENT_EDITOR,
        grantedBy: authorId,
      });

      // Check access for all files
      const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
      const accessMap = await hasAccessToFilesViaAgent(userId.toString(), fileIds, agentId);
      const accessMap = await hasAccessToFilesViaAgent({
        userId: userId,
        role: SystemRoles.USER,
        fileIds,
        agentId: agent.id, // Use agent.id which is the custom UUID
      });

      // Should have access only to the first two files
      expect(accessMap.get(fileIds[0])).toBe(true);
@@ -149,7 +158,7 @@ describe('File Access Control', () => {
      });

      // Create agent
      const agent = await createAgent({
      await createAgent({
        id: agentId,
        name: 'Test Agent',
        author: authorId,
@@ -164,7 +173,12 @@ describe('File Access Control', () => {

      // Check access as the author
      const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
      const accessMap = await hasAccessToFilesViaAgent(authorId.toString(), fileIds, agentId);
      const accessMap = await hasAccessToFilesViaAgent({
        userId: authorId,
        role: SystemRoles.USER,
        fileIds,
        agentId,
      });

      // Author should have access to all files
      expect(accessMap.get(fileIds[0])).toBe(true);
@@ -185,11 +199,12 @@ describe('File Access Control', () => {
      });

      const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
      const accessMap = await hasAccessToFilesViaAgent(
        userId.toString(),
      const accessMap = await hasAccessToFilesViaAgent({
        userId: userId,
        role: SystemRoles.USER,
        fileIds,
        'non-existent-agent',
      );
        agentId: 'non-existent-agent',
      });

      // Should have no access to any files
      expect(accessMap.get(fileIds[0])).toBe(false);
@@ -233,17 +248,22 @@ describe('File Access Control', () => {

      // Grant only VIEW permission to user on the agent
      await grantPermission({
        principalType: 'user',
        principalType: PrincipalType.USER,
        principalId: userId,
        resourceType: 'agent',
        resourceType: ResourceType.AGENT,
        resourceId: agent._id,
        accessRoleId: 'agent_viewer',
        accessRoleId: AccessRoleIds.AGENT_VIEWER,
        grantedBy: authorId,
      });

      // Check access for files
      const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
      const accessMap = await hasAccessToFilesViaAgent(userId.toString(), fileIds, agentId);
      const accessMap = await hasAccessToFilesViaAgent({
        userId: userId,
        role: SystemRoles.USER,
        fileIds,
        agentId,
      });

      // Should have no access to any files when only VIEW permission
      expect(accessMap.get(fileIds[0])).toBe(false);
@@ -291,11 +311,11 @@ describe('File Access Control', () => {

      // Grant EDIT permission to user on the agent
      await grantPermission({
        principalType: 'user',
        principalType: PrincipalType.USER,
        principalId: userId,
        resourceType: 'agent',
        resourceType: ResourceType.AGENT,
        resourceId: agent._id,
        accessRoleId: 'agent_editor',
        accessRoleId: AccessRoleIds.AGENT_EDITOR,
        grantedBy: authorId,
      });

@@ -337,7 +357,12 @@ describe('File Access Control', () => {

      // Then filter by access control
      const { filterFilesByAgentAccess } = require('~/server/services/Files/permissions');
      const files = await filterFilesByAgentAccess(allFiles, userId.toString(), agentId);
      const files = await filterFilesByAgentAccess({
        files: allFiles,
        userId: userId,
        role: SystemRoles.USER,
        agentId,
      });

      expect(files).toHaveLength(2);
      expect(files.map((f) => f.file_id)).toContain(ownedFileId);
@@ -372,4 +397,166 @@ describe('File Access Control', () => {
      expect(files).toHaveLength(2);
    });
  });

  describe('Role-based file permissions', () => {
    it('should optimize permission checks when role is provided', async () => {
      const userId = new mongoose.Types.ObjectId();
      const authorId = new mongoose.Types.ObjectId();
      const agentId = uuidv4();
      const fileIds = [uuidv4(), uuidv4()];

      // Create users
      await User.create({
        _id: userId,
        email: 'user@example.com',
        emailVerified: true,
        provider: 'local',
        role: 'ADMIN', // User has ADMIN role
      });

      await User.create({
        _id: authorId,
        email: 'author@example.com',
        emailVerified: true,
        provider: 'local',
      });

      // Create files
      for (const fileId of fileIds) {
        await createFile({
          file_id: fileId,
          user: authorId,
          filename: `${fileId}.txt`,
          filepath: `/uploads/${fileId}.txt`,
          type: 'text/plain',
          bytes: 100,
        });
      }

      // Create agent with files
      const agent = await createAgent({
        id: agentId,
        name: 'Test Agent',
        author: authorId,
        model: 'gpt-4',
        provider: 'openai',
        tool_resources: {
          file_search: {
            file_ids: fileIds,
          },
        },
      });

      // Grant permission to ADMIN role
      await grantPermission({
        principalType: PrincipalType.ROLE,
        principalId: 'ADMIN',
        resourceType: ResourceType.AGENT,
        resourceId: agent._id,
        accessRoleId: AccessRoleIds.AGENT_EDITOR,
        grantedBy: authorId,
      });

      // Check access with role provided (should avoid DB query)
      const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
      const accessMapWithRole = await hasAccessToFilesViaAgent({
        userId: userId,
        role: 'ADMIN',
        fileIds,
        agentId: agent.id,
      });

      // User should have access through their ADMIN role
      expect(accessMapWithRole.get(fileIds[0])).toBe(true);
      expect(accessMapWithRole.get(fileIds[1])).toBe(true);

      // Check access without role (will query DB to get user's role)
      const accessMapWithoutRole = await hasAccessToFilesViaAgent({
        userId: userId,
        fileIds,
        agentId: agent.id,
      });

      // Should have same result
      expect(accessMapWithoutRole.get(fileIds[0])).toBe(true);
      expect(accessMapWithoutRole.get(fileIds[1])).toBe(true);
    });

    it('should deny access when user role changes', async () => {
      const userId = new mongoose.Types.ObjectId();
      const authorId = new mongoose.Types.ObjectId();
      const agentId = uuidv4();
      const fileId = uuidv4();

      // Create users
      await User.create({
        _id: userId,
        email: 'user@example.com',
        emailVerified: true,
        provider: 'local',
        role: 'EDITOR',
      });

      await User.create({
        _id: authorId,
        email: 'author@example.com',
        emailVerified: true,
        provider: 'local',
      });

      // Create file
      await createFile({
        file_id: fileId,
        user: authorId,
        filename: 'test.txt',
        filepath: '/uploads/test.txt',
        type: 'text/plain',
        bytes: 100,
      });

      // Create agent
      const agent = await createAgent({
        id: agentId,
        name: 'Test Agent',
        author: authorId,
        model: 'gpt-4',
        provider: 'openai',
        tool_resources: {
          file_search: {
            file_ids: [fileId],
          },
        },
      });

      // Grant permission to EDITOR role only
      await grantPermission({
        principalType: PrincipalType.ROLE,
        principalId: 'EDITOR',
        resourceType: ResourceType.AGENT,
        resourceId: agent._id,
        accessRoleId: AccessRoleIds.AGENT_EDITOR,
        grantedBy: authorId,
      });

      const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');

      // Check with EDITOR role - should have access
      const accessAsEditor = await hasAccessToFilesViaAgent({
        userId: userId,
        role: 'EDITOR',
        fileIds: [fileId],
        agentId: agent.id,
      });
      expect(accessAsEditor.get(fileId)).toBe(true);

      // Simulate role change to USER - should lose access
      const accessAsUser = await hasAccessToFilesViaAgent({
        userId: userId,
        role: SystemRoles.USER,
        fileIds: [fileId],
        agentId: agent.id,
      });
      expect(accessAsUser.get(fileId)).toBe(false);
    });
  });
});
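The recurring change in this spec is the move from positional arguments to a single options object for hasAccessToFilesViaAgent, with an optional role that lets the service skip looking up the caller's role. A minimal sketch of the call-site difference, inside an async test (the object keys are taken from the diff; the fast-path note is an inference from the test comments):

const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');

// Before: positional arguments.
// const accessMap = await hasAccessToFilesViaAgent(userId.toString(), fileIds, agentId);

// After: one options object. Passing `role` lets the service avoid a DB
// query for the user's role; omitting it falls back to a lookup.
const accessMap = await hasAccessToFilesViaAgent({
  userId,
  role: SystemRoles.USER, // optional fast path
  fileIds,
  agentId,
});
// accessMap is a Map<fileId, boolean>, per the assertions in this spec.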
@@ -1,7 +1,7 @@
const { z } = require('zod');
const { logger } = require('@librechat/data-schemas');
const { createTempChatExpirationDate } = require('@librechat/api');
const getCustomConfig = require('~/server/services/Config/getCustomConfig');
const { getCustomConfig } = require('~/server/services/Config/getCustomConfig');
const { Message } = require('~/db/models');

const idSchema = z.string().uuid();
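The one-line change above follows from getCustomConfig now being a named export rather than the module's default export, so the require site has to destructure. In plain CommonJS terms:

// Old module shape: module.exports = getCustomConfig;
// const getCustomConfig = require('~/server/services/Config/getCustomConfig');

// New module shape: module.exports = { getCustomConfig, ... };
const { getCustomConfig } = require('~/server/services/Config/getCustomConfig');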
@@ -1,17 +1,21 @@
const mongoose = require('mongoose');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { v4: uuidv4 } = require('uuid');
const { messageSchema } = require('@librechat/data-schemas');
const { MongoMemoryServer } = require('mongodb-memory-server');

const {
  saveMessage,
  getMessages,
  updateMessage,
  deleteMessages,
  bulkSaveMessages,
  updateMessageText,
  deleteMessagesSince,
} = require('./Message');

jest.mock('~/server/services/Config/getCustomConfig');
const { getCustomConfig } = require('~/server/services/Config/getCustomConfig');

/**
 * @type {import('mongoose').Model<import('@librechat/data-schemas').IMessage>}
 */
@@ -117,21 +121,21 @@ describe('Message Operations', () => {
    const conversationId = uuidv4();

    // Create multiple messages in the same conversation
    const message1 = await saveMessage(mockReq, {
    await saveMessage(mockReq, {
      messageId: 'msg1',
      conversationId,
      text: 'First message',
      user: 'user123',
    });

    const message2 = await saveMessage(mockReq, {
    await saveMessage(mockReq, {
      messageId: 'msg2',
      conversationId,
      text: 'Second message',
      user: 'user123',
    });

    const message3 = await saveMessage(mockReq, {
    await saveMessage(mockReq, {
      messageId: 'msg3',
      conversationId,
      text: 'Third message',
@@ -314,4 +318,265 @@ describe('Message Operations', () => {
      expect(messages[0].text).toBe('Victim message');
    });
  });

  describe('isTemporary message handling', () => {
    beforeEach(() => {
      // Reset mocks before each test
      jest.clearAllMocks();
    });

    it('should save a message with expiredAt when isTemporary is true', async () => {
      // Mock custom config with 24 hour retention
      getCustomConfig.mockResolvedValue({
        interface: {
          temporaryChatRetention: 24,
        },
      });

      mockReq.body = { isTemporary: true };

      const beforeSave = new Date();
      const result = await saveMessage(mockReq, mockMessageData);
      const afterSave = new Date();

      expect(result.messageId).toBe('msg123');
      expect(result.expiredAt).toBeDefined();
      expect(result.expiredAt).toBeInstanceOf(Date);

      // Verify expiredAt is approximately 24 hours in the future
      const expectedExpirationTime = new Date(beforeSave.getTime() + 24 * 60 * 60 * 1000);
      const actualExpirationTime = new Date(result.expiredAt);

      expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
        expectedExpirationTime.getTime() - 1000,
      );
      expect(actualExpirationTime.getTime()).toBeLessThanOrEqual(
        new Date(afterSave.getTime() + 24 * 60 * 60 * 1000 + 1000).getTime(),
      );
    });

    it('should save a message without expiredAt when isTemporary is false', async () => {
      mockReq.body = { isTemporary: false };

      const result = await saveMessage(mockReq, mockMessageData);

      expect(result.messageId).toBe('msg123');
      expect(result.expiredAt).toBeNull();
    });

    it('should save a message without expiredAt when isTemporary is not provided', async () => {
      // No isTemporary in body
      mockReq.body = {};

      const result = await saveMessage(mockReq, mockMessageData);

      expect(result.messageId).toBe('msg123');
      expect(result.expiredAt).toBeNull();
    });

    it('should use custom retention period from config', async () => {
      // Mock custom config with 48 hour retention
      getCustomConfig.mockResolvedValue({
        interface: {
          temporaryChatRetention: 48,
        },
      });

      mockReq.body = { isTemporary: true };

      const beforeSave = new Date();
      const result = await saveMessage(mockReq, mockMessageData);

      expect(result.expiredAt).toBeDefined();

      // Verify expiredAt is approximately 48 hours in the future
      const expectedExpirationTime = new Date(beforeSave.getTime() + 48 * 60 * 60 * 1000);
      const actualExpirationTime = new Date(result.expiredAt);

      expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
        expectedExpirationTime.getTime() - 1000,
      );
      expect(actualExpirationTime.getTime()).toBeLessThanOrEqual(
        expectedExpirationTime.getTime() + 1000,
      );
    });

    it('should handle minimum retention period (1 hour)', async () => {
      // Mock custom config with less than minimum retention
      getCustomConfig.mockResolvedValue({
        interface: {
          temporaryChatRetention: 0.5, // Half hour - should be clamped to 1 hour
        },
      });

      mockReq.body = { isTemporary: true };

      const beforeSave = new Date();
      const result = await saveMessage(mockReq, mockMessageData);

      expect(result.expiredAt).toBeDefined();

      // Verify expiredAt is approximately 1 hour in the future (minimum)
      const expectedExpirationTime = new Date(beforeSave.getTime() + 1 * 60 * 60 * 1000);
      const actualExpirationTime = new Date(result.expiredAt);

      expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
        expectedExpirationTime.getTime() - 1000,
      );
      expect(actualExpirationTime.getTime()).toBeLessThanOrEqual(
        expectedExpirationTime.getTime() + 1000,
      );
    });

    it('should handle maximum retention period (8760 hours)', async () => {
      // Mock custom config with more than maximum retention
      getCustomConfig.mockResolvedValue({
        interface: {
          temporaryChatRetention: 10000, // Should be clamped to 8760 hours
        },
      });

      mockReq.body = { isTemporary: true };

      const beforeSave = new Date();
      const result = await saveMessage(mockReq, mockMessageData);

      expect(result.expiredAt).toBeDefined();

      // Verify expiredAt is approximately 8760 hours (1 year) in the future
      const expectedExpirationTime = new Date(beforeSave.getTime() + 8760 * 60 * 60 * 1000);
      const actualExpirationTime = new Date(result.expiredAt);

      expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
        expectedExpirationTime.getTime() - 1000,
      );
      expect(actualExpirationTime.getTime()).toBeLessThanOrEqual(
        expectedExpirationTime.getTime() + 1000,
      );
    });

    it('should handle getCustomConfig errors gracefully', async () => {
      // Mock getCustomConfig to throw an error
      getCustomConfig.mockRejectedValue(new Error('Config service unavailable'));

      mockReq.body = { isTemporary: true };

      const result = await saveMessage(mockReq, mockMessageData);

      // Should still save the message but with expiredAt as null
      expect(result.messageId).toBe('msg123');
      expect(result.expiredAt).toBeNull();
    });

    it('should use default retention when config is not provided', async () => {
      // Mock getCustomConfig to return empty config
      getCustomConfig.mockResolvedValue({});

      mockReq.body = { isTemporary: true };

      const beforeSave = new Date();
      const result = await saveMessage(mockReq, mockMessageData);

      expect(result.expiredAt).toBeDefined();

      // Default retention is 30 days (720 hours)
      const expectedExpirationTime = new Date(beforeSave.getTime() + 30 * 24 * 60 * 60 * 1000);
      const actualExpirationTime = new Date(result.expiredAt);

      expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
        expectedExpirationTime.getTime() - 1000,
      );
      expect(actualExpirationTime.getTime()).toBeLessThanOrEqual(
        expectedExpirationTime.getTime() + 1000,
      );
    });

    it('should not update expiredAt on message update', async () => {
      // First save a temporary message
      getCustomConfig.mockResolvedValue({
        interface: {
          temporaryChatRetention: 24,
        },
      });

      mockReq.body = { isTemporary: true };
      const savedMessage = await saveMessage(mockReq, mockMessageData);
      const originalExpiredAt = savedMessage.expiredAt;

      // Now update the message without isTemporary flag
      mockReq.body = {};
      const updatedMessage = await updateMessage(mockReq, {
        messageId: 'msg123',
        text: 'Updated text',
      });

      // expiredAt should not be in the returned updated message object
      expect(updatedMessage.expiredAt).toBeUndefined();

      // Verify in database that expiredAt wasn't changed
      const dbMessage = await Message.findOne({ messageId: 'msg123', user: 'user123' });
      expect(dbMessage.expiredAt).toEqual(originalExpiredAt);
    });

    it('should preserve expiredAt when saving existing temporary message', async () => {
      // First save a temporary message
      getCustomConfig.mockResolvedValue({
        interface: {
          temporaryChatRetention: 24,
        },
      });

      mockReq.body = { isTemporary: true };
      const firstSave = await saveMessage(mockReq, mockMessageData);
      const originalExpiredAt = firstSave.expiredAt;

      // Wait a bit to ensure time difference
      await new Promise((resolve) => setTimeout(resolve, 100));

      // Save again with same messageId but different text
      const updatedData = { ...mockMessageData, text: 'Updated text' };
      const secondSave = await saveMessage(mockReq, updatedData);

      // Should update text but create new expiredAt
      expect(secondSave.text).toBe('Updated text');
      expect(secondSave.expiredAt).toBeDefined();
      expect(new Date(secondSave.expiredAt).getTime()).toBeGreaterThan(
        new Date(originalExpiredAt).getTime(),
      );
    });

    it('should handle bulk operations with temporary messages', async () => {
      // This test verifies bulkSaveMessages doesn't interfere with expiredAt
      const messages = [
        {
          messageId: 'bulk1',
          conversationId: uuidv4(),
          text: 'Bulk message 1',
          user: 'user123',
          expiredAt: new Date(Date.now() + 24 * 60 * 60 * 1000),
        },
        {
          messageId: 'bulk2',
          conversationId: uuidv4(),
          text: 'Bulk message 2',
          user: 'user123',
          expiredAt: null,
        },
      ];

      await bulkSaveMessages(messages);

      const savedMessages = await Message.find({
        messageId: { $in: ['bulk1', 'bulk2'] },
      }).lean();

      expect(savedMessages).toHaveLength(2);

      const bulk1 = savedMessages.find((m) => m.messageId === 'bulk1');
      const bulk2 = savedMessages.find((m) => m.messageId === 'bulk2');

      expect(bulk1.expiredAt).toBeDefined();
      expect(bulk2.expiredAt).toBeNull();
    });
  });
});
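Taken together, the retention tests pin down the behavior expected of createTempChatExpirationDate: a 1-hour minimum, an 8760-hour (one year) maximum, and a 30-day default. A minimal sketch consistent with those assertions (an illustration of the contract, not the @librechat/api implementation):

const HOUR_MS = 60 * 60 * 1000;
const MIN_HOURS = 1;
const MAX_HOURS = 8760; // one year
const DEFAULT_HOURS = 720; // 30 days

/** Returns the expiration Date for a temporary chat, clamping the configured retention. */
function tempChatExpirationDate(config) {
  const configured = config?.interface?.temporaryChatRetention ?? DEFAULT_HOURS;
  const hours = Math.min(Math.max(configured, MIN_HOURS), MAX_HOURS);
  return new Date(Date.now() + hours * HOUR_MS);
}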
@@ -1,12 +1,18 @@
const { ObjectId } = require('mongodb');
const { logger } = require('@librechat/data-schemas');
const { SystemRoles, SystemCategories, Constants } = require('librechat-data-provider');
const {
  getProjectByName,
  addGroupIdsToProject,
  removeGroupIdsFromProject,
  Constants,
  SystemRoles,
  ResourceType,
  SystemCategories,
} = require('librechat-data-provider');
const {
  removeGroupFromAllProjects,
  removeGroupIdsFromProject,
  addGroupIdsToProject,
  getProjectByName,
} = require('./Project');
const { removeAllPermissions } = require('~/server/services/PermissionService');
const { PromptGroup, Prompt } = require('~/db/models');
const { escapeRegExp } = require('~/server/utils');

@@ -100,10 +106,6 @@ const getAllPromptGroups = async (req, filter) => {
  try {
    const { name, ...query } = filter;

    if (!query.author) {
      throw new Error('Author is required');
    }

    let searchShared = true;
    let searchSharedOnly = false;
    if (name) {
@@ -153,10 +155,6 @@ const getPromptGroups = async (req, filter) => {
  const validatedPageNumber = Math.max(parseInt(pageNumber, 10), 1);
  const validatedPageSize = Math.max(parseInt(pageSize, 10), 1);

  if (!query.author) {
    throw new Error('Author is required');
  }

  let searchShared = true;
  let searchSharedOnly = false;
  if (name) {
@@ -221,12 +219,16 @@ const getPromptGroups = async (req, filter) => {
 * @returns {Promise<TDeletePromptGroupResponse>}
 */
const deletePromptGroup = async ({ _id, author, role }) => {
  const query = { _id, author };
  const groupQuery = { groupId: new ObjectId(_id), author };
  if (role === SystemRoles.ADMIN) {
    delete query.author;
    delete groupQuery.author;
  // Build query - with ACL, author is optional
  const query = { _id };
  const groupQuery = { groupId: new ObjectId(_id) };

  // Legacy: Add author filter if provided (backward compatibility)
  if (author && role !== SystemRoles.ADMIN) {
    query.author = author;
    groupQuery.author = author;
  }

  const response = await PromptGroup.deleteOne(query);

  if (!response || response.deletedCount === 0) {
@@ -235,13 +237,140 @@ const deletePromptGroup = async ({ _id, author, role }) => {

  await Prompt.deleteMany(groupQuery);
  await removeGroupFromAllProjects(_id);

  try {
    await removeAllPermissions({ resourceType: ResourceType.PROMPTGROUP, resourceId: _id });
  } catch (error) {
    logger.error('Error removing promptGroup permissions:', error);
  }

  return { message: 'Prompt group deleted successfully' };
};

/**
 * Get prompt groups by accessible IDs with optional cursor-based pagination.
 * @param {Object} params - The parameters for getting accessible prompt groups.
 * @param {Array} [params.accessibleIds] - Array of prompt group ObjectIds the user has ACL access to.
 * @param {Object} [params.otherParams] - Additional query parameters (including author filter).
 * @param {number} [params.limit] - Number of prompt groups to return (max 100). If not provided, returns all prompt groups.
 * @param {string} [params.after] - Cursor for pagination - get prompt groups after this cursor (a base64-encoded JSON string with updatedAt and _id).
 * @returns {Promise<Object>} A promise that resolves to an object containing the prompt groups data and pagination info.
 */
async function getListPromptGroupsByAccess({
  accessibleIds = [],
  otherParams = {},
  limit = null,
  after = null,
}) {
  const isPaginated = limit !== null && limit !== undefined;
  const normalizedLimit = isPaginated ? Math.min(Math.max(1, parseInt(limit) || 20), 100) : null;

  // Build base query combining ACL accessible prompt groups with other filters
  const baseQuery = { ...otherParams, _id: { $in: accessibleIds } };

  // Add cursor condition
  if (after) {
    try {
      const cursor = JSON.parse(Buffer.from(after, 'base64').toString('utf8'));
      const { updatedAt, _id } = cursor;

      const cursorCondition = {
        $or: [
          { updatedAt: { $lt: new Date(updatedAt) } },
          { updatedAt: new Date(updatedAt), _id: { $gt: new ObjectId(_id) } },
        ],
      };

      // Merge cursor condition with base query
      if (Object.keys(baseQuery).length > 0) {
        baseQuery.$and = [{ ...baseQuery }, cursorCondition];
        // Remove the original conditions from baseQuery to avoid duplication
        Object.keys(baseQuery).forEach((key) => {
          if (key !== '$and') delete baseQuery[key];
        });
      } else {
        Object.assign(baseQuery, cursorCondition);
      }
    } catch (error) {
      logger.warn('Invalid cursor:', error.message);
    }
  }

  // Build aggregation pipeline
  const pipeline = [{ $match: baseQuery }, { $sort: { updatedAt: -1, _id: 1 } }];

  // Only apply limit if pagination is requested
  if (isPaginated) {
    pipeline.push({ $limit: normalizedLimit + 1 });
  }

  // Add lookup for production prompt
  pipeline.push(
    {
      $lookup: {
        from: 'prompts',
        localField: 'productionId',
        foreignField: '_id',
        as: 'productionPrompt',
      },
    },
    { $unwind: { path: '$productionPrompt', preserveNullAndEmptyArrays: true } },
    {
      $project: {
        name: 1,
        numberOfGenerations: 1,
        oneliner: 1,
        category: 1,
        projectIds: 1,
        productionId: 1,
        author: 1,
        authorName: 1,
        createdAt: 1,
        updatedAt: 1,
        'productionPrompt.prompt': 1,
      },
    },
  );

  const promptGroups = await PromptGroup.aggregate(pipeline).exec();

  const hasMore = isPaginated ? promptGroups.length > normalizedLimit : false;
  const data = (isPaginated ? promptGroups.slice(0, normalizedLimit) : promptGroups).map(
    (group) => {
      if (group.author) {
        group.author = group.author.toString();
      }
      return group;
    },
  );

  // Generate next cursor only if paginated
  let nextCursor = null;
  if (isPaginated && hasMore && data.length > 0) {
    const lastGroup = promptGroups[normalizedLimit - 1];
    nextCursor = Buffer.from(
      JSON.stringify({
        updatedAt: lastGroup.updatedAt.toISOString(),
        _id: lastGroup._id.toString(),
      }),
    ).toString('base64');
  }

  return {
    object: 'list',
    data,
    first_id: data.length > 0 ? data[0]._id.toString() : null,
    last_id: data.length > 0 ? data[data.length - 1]._id.toString() : null,
    has_more: hasMore,
    after: nextCursor,
  };
}

module.exports = {
  getPromptGroups,
  deletePromptGroup,
  getAllPromptGroups,
  getListPromptGroupsByAccess,
  /**
   * Create a prompt and its respective group
   * @param {TCreatePromptRecord} saveData
@@ -430,6 +559,16 @@ module.exports = {
      .lean();

    if (remainingPrompts.length === 0) {
      // Remove all ACL entries for the promptGroup when deleting the last prompt
      try {
        await removeAllPermissions({
          resourceType: ResourceType.PROMPTGROUP,
          resourceId: groupId,
        });
      } catch (error) {
        logger.error('Error removing promptGroup permissions:', error);
      }

      await PromptGroup.deleteOne({ _id: groupId });
      await removeGroupFromAllProjects(groupId);
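For callers of getListPromptGroupsByAccess, the after cursor is opaque but easy to inspect: it is the last item's updatedAt and _id, JSON-encoded and then base64-encoded, exactly as the function builds it above. A short usage sketch (assumes an accessibleIds array produced by the ACL layer):

// Sketch: page through all accessible prompt groups, 20 at a time.
async function listAllAccessibleGroups(accessibleIds) {
  let after = null;
  const all = [];
  do {
    const page = await getListPromptGroupsByAccess({ accessibleIds, limit: 20, after });
    all.push(...page.data);
    after = page.has_more ? page.after : null;
  } while (after);
  return all;
}

// The cursor itself is just base64-encoded JSON:
// JSON.parse(Buffer.from(page.after, 'base64').toString('utf8'))
//   -> { updatedAt: '<ISO date>', _id: '<ObjectId hex>' }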
564 api/models/Prompt.spec.js Normal file
@@ -0,0 +1,564 @@
const mongoose = require('mongoose');
const { ObjectId } = require('mongodb');
const { logger } = require('@librechat/data-schemas');
const { MongoMemoryServer } = require('mongodb-memory-server');
const {
  SystemRoles,
  ResourceType,
  AccessRoleIds,
  PrincipalType,
  PermissionBits,
} = require('librechat-data-provider');

// Mock the config/connect module to prevent connection attempts during tests
jest.mock('../../config/connect', () => jest.fn().mockResolvedValue(true));

const dbModels = require('~/db/models');

// Disable console for tests
logger.silent = true;

let mongoServer;
let Prompt, PromptGroup, AclEntry, AccessRole, User, Group, Project;
let promptFns, permissionService;
let testUsers, testGroups, testRoles;

beforeAll(async () => {
  // Set up MongoDB memory server
  mongoServer = await MongoMemoryServer.create();
  const mongoUri = mongoServer.getUri();
  await mongoose.connect(mongoUri);

  // Initialize models
  Prompt = dbModels.Prompt;
  PromptGroup = dbModels.PromptGroup;
  AclEntry = dbModels.AclEntry;
  AccessRole = dbModels.AccessRole;
  User = dbModels.User;
  Group = dbModels.Group;
  Project = dbModels.Project;

  promptFns = require('~/models/Prompt');
  permissionService = require('~/server/services/PermissionService');

  // Create test data
  await setupTestData();
});

afterAll(async () => {
  await mongoose.disconnect();
  await mongoServer.stop();
  jest.clearAllMocks();
});

async function setupTestData() {
  // Create access roles for promptGroups
  testRoles = {
    viewer: await AccessRole.create({
      accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
      name: 'Viewer',
      description: 'Can view promptGroups',
      resourceType: ResourceType.PROMPTGROUP,
      permBits: PermissionBits.VIEW,
    }),
    editor: await AccessRole.create({
      accessRoleId: AccessRoleIds.PROMPTGROUP_EDITOR,
      name: 'Editor',
      description: 'Can view and edit promptGroups',
      resourceType: ResourceType.PROMPTGROUP,
      permBits: PermissionBits.VIEW | PermissionBits.EDIT,
    }),
    owner: await AccessRole.create({
      accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
      name: 'Owner',
      description: 'Full control over promptGroups',
      resourceType: ResourceType.PROMPTGROUP,
      permBits:
        PermissionBits.VIEW | PermissionBits.EDIT | PermissionBits.DELETE | PermissionBits.SHARE,
    }),
  };

  // Create test users
  testUsers = {
    owner: await User.create({
      name: 'Prompt Owner',
      email: 'owner@example.com',
      role: SystemRoles.USER,
    }),
    editor: await User.create({
      name: 'Prompt Editor',
      email: 'editor@example.com',
      role: SystemRoles.USER,
    }),
    viewer: await User.create({
      name: 'Prompt Viewer',
      email: 'viewer@example.com',
      role: SystemRoles.USER,
    }),
    admin: await User.create({
      name: 'Admin User',
      email: 'admin@example.com',
      role: SystemRoles.ADMIN,
    }),
    noAccess: await User.create({
      name: 'No Access User',
      email: 'noaccess@example.com',
      role: SystemRoles.USER,
    }),
  };

  // Create test groups
  testGroups = {
    editors: await Group.create({
      name: 'Prompt Editors',
      description: 'Group with editor access',
    }),
    viewers: await Group.create({
      name: 'Prompt Viewers',
      description: 'Group with viewer access',
    }),
  };

  await Project.create({
    name: 'Global',
    description: 'Global project',
    promptGroupIds: [],
  });
}

describe('Prompt ACL Permissions', () => {
  describe('Creating Prompts with Permissions', () => {
    it('should grant owner permissions when creating a prompt', async () => {
      // First create a group
      const testGroup = await PromptGroup.create({
        name: 'Test Group',
        category: 'testing',
        author: testUsers.owner._id,
        authorName: testUsers.owner.name,
        productionId: new mongoose.Types.ObjectId(),
      });

      const promptData = {
        prompt: {
          prompt: 'Test prompt content',
          name: 'Test Prompt',
          type: 'text',
          groupId: testGroup._id,
        },
        author: testUsers.owner._id,
      };

      await promptFns.savePrompt(promptData);

      // Manually grant permissions as would happen in the route
      await permissionService.grantPermission({
        principalType: PrincipalType.USER,
        principalId: testUsers.owner._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testGroup._id,
        accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
        grantedBy: testUsers.owner._id,
      });

      // Check ACL entry
      const aclEntry = await AclEntry.findOne({
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testGroup._id,
        principalType: PrincipalType.USER,
        principalId: testUsers.owner._id,
      });

      expect(aclEntry).toBeTruthy();
      expect(aclEntry.permBits).toBe(testRoles.owner.permBits);
    });
  });

  describe('Accessing Prompts', () => {
    let testPromptGroup;

    beforeEach(async () => {
      // Create a prompt group
      testPromptGroup = await PromptGroup.create({
        name: 'Test Group',
        author: testUsers.owner._id,
        authorName: testUsers.owner.name,
        productionId: new ObjectId(),
      });

      // Create a prompt
      await Prompt.create({
        prompt: 'Test prompt for access control',
        name: 'Access Test Prompt',
        author: testUsers.owner._id,
        groupId: testPromptGroup._id,
        type: 'text',
      });

      // Grant owner permissions
      await permissionService.grantPermission({
        principalType: PrincipalType.USER,
        principalId: testUsers.owner._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
        grantedBy: testUsers.owner._id,
      });
    });

    afterEach(async () => {
      await Prompt.deleteMany({});
      await PromptGroup.deleteMany({});
      await AclEntry.deleteMany({});
    });

    it('owner should have full access to their prompt', async () => {
      const hasAccess = await permissionService.checkPermission({
        userId: testUsers.owner._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        requiredPermission: PermissionBits.VIEW,
      });

      expect(hasAccess).toBe(true);

      const canEdit = await permissionService.checkPermission({
        userId: testUsers.owner._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        requiredPermission: PermissionBits.EDIT,
      });

      expect(canEdit).toBe(true);
    });

    it('user with viewer role should only have view access', async () => {
      // Grant viewer permissions
      await permissionService.grantPermission({
        principalType: PrincipalType.USER,
        principalId: testUsers.viewer._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
        grantedBy: testUsers.owner._id,
      });

      const canView = await permissionService.checkPermission({
        userId: testUsers.viewer._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        requiredPermission: PermissionBits.VIEW,
      });

      const canEdit = await permissionService.checkPermission({
        userId: testUsers.viewer._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        requiredPermission: PermissionBits.EDIT,
      });

      expect(canView).toBe(true);
      expect(canEdit).toBe(false);
    });

    it('user without permissions should have no access', async () => {
      const hasAccess = await permissionService.checkPermission({
        userId: testUsers.noAccess._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        requiredPermission: PermissionBits.VIEW,
      });

      expect(hasAccess).toBe(false);
    });

    it('admin should have access regardless of permissions', async () => {
      // Admin users should work through normal permission system
      // The middleware layer handles admin bypass, not the permission service
      const hasAccess = await permissionService.checkPermission({
        userId: testUsers.admin._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        requiredPermission: PermissionBits.VIEW,
      });

      // Without explicit permissions, even admin won't have access at this layer
      expect(hasAccess).toBe(false);

      // The actual admin bypass happens in the middleware layer (`canAccessPromptViaGroup`/`canAccessPromptGroupResource`)
      // which checks req.user.role === SystemRoles.ADMIN
    });
  });

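  // As the comments in the admin test note, checkPermission itself has no admin
  // special case; the bypass lives in middleware such as
  // canAccessPromptGroupResource. A hypothetical sketch of that pattern,
  // matching the role check named in the comment (not the repository's code):
  //
  //   const { SystemRoles } = require('librechat-data-provider');
  //
  //   function requirePromptGroupAccess(checkPermission, requiredPermission) {
  //     return async (req, res, next) => {
  //       if (req.user.role === SystemRoles.ADMIN) {
  //         return next(); // admin bypass happens here, not in PermissionService
  //       }
  //       const allowed = await checkPermission({
  //         userId: req.user._id,
  //         resourceType: 'promptGroup',
  //         resourceId: req.params.groupId,
  //         requiredPermission,
  //       });
  //       return allowed ? next() : res.status(403).json({ message: 'Forbidden' });
  //     };
  //   }
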
  describe('Group-based Access', () => {
    let testPromptGroup;

    beforeEach(async () => {
      // Create a prompt group first
      testPromptGroup = await PromptGroup.create({
        name: 'Group Access Test Group',
        author: testUsers.owner._id,
        authorName: testUsers.owner.name,
        productionId: new ObjectId(),
      });

      await Prompt.create({
        prompt: 'Group access test prompt',
        name: 'Group Test',
        author: testUsers.owner._id,
        groupId: testPromptGroup._id,
        type: 'text',
      });

      // Add users to groups
      await User.findByIdAndUpdate(testUsers.editor._id, {
        $push: { groups: testGroups.editors._id },
      });

      await User.findByIdAndUpdate(testUsers.viewer._id, {
        $push: { groups: testGroups.viewers._id },
      });
    });

    afterEach(async () => {
      await Prompt.deleteMany({});
      await AclEntry.deleteMany({});
      await User.updateMany({}, { $set: { groups: [] } });
    });

    it('group members should inherit group permissions', async () => {
      // Create a prompt group
      const testPromptGroup = await PromptGroup.create({
        name: 'Group Test Group',
        author: testUsers.owner._id,
        authorName: testUsers.owner.name,
        productionId: new ObjectId(),
      });

      const { addUserToGroup } = require('~/models');
      await addUserToGroup(testUsers.editor._id, testGroups.editors._id);

      const prompt = await promptFns.savePrompt({
        author: testUsers.owner._id,
        prompt: {
          prompt: 'Group test prompt',
          name: 'Group Test',
          groupId: testPromptGroup._id,
          type: 'text',
        },
      });

      // Check if savePrompt returned an error
      if (!prompt || !prompt.prompt) {
        throw new Error(`Failed to save prompt: ${prompt?.message || 'Unknown error'}`);
      }

      // Grant edit permissions to the group
      await permissionService.grantPermission({
        principalType: PrincipalType.GROUP,
        principalId: testGroups.editors._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        accessRoleId: AccessRoleIds.PROMPTGROUP_EDITOR,
        grantedBy: testUsers.owner._id,
      });

      // Check if group member has access
      const hasAccess = await permissionService.checkPermission({
        userId: testUsers.editor._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        requiredPermission: PermissionBits.EDIT,
      });

      expect(hasAccess).toBe(true);

      // Check that non-member doesn't have access
      const nonMemberAccess = await permissionService.checkPermission({
        userId: testUsers.viewer._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        requiredPermission: PermissionBits.EDIT,
      });

      expect(nonMemberAccess).toBe(false);
    });
  });

  describe('Public Access', () => {
    let publicPromptGroup, privatePromptGroup;

    beforeEach(async () => {
      // Create separate prompt groups for public and private access
      publicPromptGroup = await PromptGroup.create({
        name: 'Public Access Test Group',
        author: testUsers.owner._id,
        authorName: testUsers.owner.name,
        productionId: new ObjectId(),
      });

      privatePromptGroup = await PromptGroup.create({
        name: 'Private Access Test Group',
        author: testUsers.owner._id,
        authorName: testUsers.owner.name,
        productionId: new ObjectId(),
      });

      // Create prompts in their respective groups
      await Prompt.create({
        prompt: 'Public prompt',
        name: 'Public',
        author: testUsers.owner._id,
        groupId: publicPromptGroup._id,
        type: 'text',
      });

      await Prompt.create({
        prompt: 'Private prompt',
        name: 'Private',
        author: testUsers.owner._id,
        groupId: privatePromptGroup._id,
        type: 'text',
      });

      // Grant public view access to publicPromptGroup
      await permissionService.grantPermission({
        principalType: PrincipalType.PUBLIC,
        principalId: null,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: publicPromptGroup._id,
        accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
        grantedBy: testUsers.owner._id,
      });

      // Grant only owner access to privatePromptGroup
      await permissionService.grantPermission({
        principalType: PrincipalType.USER,
        principalId: testUsers.owner._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: privatePromptGroup._id,
        accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
        grantedBy: testUsers.owner._id,
      });
    });

    afterEach(async () => {
      await Prompt.deleteMany({});
      await PromptGroup.deleteMany({});
      await AclEntry.deleteMany({});
    });

    it('public prompt should be accessible to any user', async () => {
      const hasAccess = await permissionService.checkPermission({
        userId: testUsers.noAccess._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: publicPromptGroup._id,
        requiredPermission: PermissionBits.VIEW,
        includePublic: true,
      });

      expect(hasAccess).toBe(true);
    });

    it('private prompt should not be accessible to unauthorized users', async () => {
      const hasAccess = await permissionService.checkPermission({
        userId: testUsers.noAccess._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: privatePromptGroup._id,
        requiredPermission: PermissionBits.VIEW,
        includePublic: true,
      });

      expect(hasAccess).toBe(false);
    });
  });

  describe('Prompt Deletion', () => {
    let testPromptGroup;

    it('should remove ACL entries when prompt is deleted', async () => {
      testPromptGroup = await PromptGroup.create({
        name: 'Deletion Test Group',
        author: testUsers.owner._id,
        authorName: testUsers.owner.name,
        productionId: new ObjectId(),
      });

      const prompt = await promptFns.savePrompt({
        author: testUsers.owner._id,
        prompt: {
          prompt: 'To be deleted',
          name: 'Delete Test',
          groupId: testPromptGroup._id,
          type: 'text',
        },
      });

      // Check if savePrompt returned an error
      if (!prompt || !prompt.prompt) {
        throw new Error(`Failed to save prompt: ${prompt?.message || 'Unknown error'}`);
      }

      const testPromptId = prompt.prompt._id;
      const promptGroupId = testPromptGroup._id;

      // Grant permission
      await permissionService.grantPermission({
        principalType: PrincipalType.USER,
        principalId: testUsers.owner._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
        accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
        grantedBy: testUsers.owner._id,
      });

      // Verify ACL entry exists
      const beforeDelete = await AclEntry.find({
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
      });
      expect(beforeDelete).toHaveLength(1);

      // Delete the prompt
      await promptFns.deletePrompt({
        promptId: testPromptId,
        groupId: promptGroupId,
        author: testUsers.owner._id,
        role: SystemRoles.USER,
      });

      // Verify ACL entries are removed
      const aclEntries = await AclEntry.find({
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testPromptGroup._id,
      });

      expect(aclEntries).toHaveLength(0);
    });
  });

  describe('Backwards Compatibility', () => {
    it('should handle prompts without ACL entries gracefully', async () => {
      // Create a prompt group first
      const promptGroup = await PromptGroup.create({
        name: 'Legacy Test Group',
        author: testUsers.owner._id,
        authorName: testUsers.owner.name,
        productionId: new ObjectId(),
      });

      // Create a prompt without ACL entries (legacy prompt)
      const legacyPrompt = await Prompt.create({
        prompt: 'Legacy prompt without ACL',
        name: 'Legacy',
        author: testUsers.owner._id,
        groupId: promptGroup._id,
        type: 'text',
      });

      // The system should handle this gracefully
      const prompt = await promptFns.getPrompt({ _id: legacyPrompt._id });
      expect(prompt).toBeTruthy();
      expect(prompt._id.toString()).toBe(legacyPrompt._id.toString());
    });
  });
});
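Throughout this spec, roles are defined by OR-ing PermissionBits flags, which implies permission checks reduce to bitmask tests. A sketch of the underlying idea (the flag values are illustrative assumptions; only the OR-combinations appear in the diff):

// Illustrative flag values; the real constants live in librechat-data-provider.
const Bits = { VIEW: 1, EDIT: 2, DELETE: 4, SHARE: 8 };

// A role's permBits is an OR of flags, e.g. the owner role above:
const ownerBits = Bits.VIEW | Bits.EDIT | Bits.DELETE | Bits.SHARE;

// Checking a required permission is then a single bitwise AND:
function hasPermission(permBits, required) {
  return (permBits & required) === required;
}

console.log(hasPermission(ownerBits, Bits.EDIT)); // true
console.log(hasPermission(Bits.VIEW, Bits.EDIT)); // false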
280 api/models/PromptGroupMigration.spec.js Normal file
@@ -0,0 +1,280 @@
const mongoose = require('mongoose');
const { ObjectId } = require('mongodb');
const { logger } = require('@librechat/data-schemas');
const { MongoMemoryServer } = require('mongodb-memory-server');
const {
  Constants,
  ResourceType,
  AccessRoleIds,
  PrincipalType,
  PrincipalModel,
  PermissionBits,
} = require('librechat-data-provider');

// Mock the config/connect module to prevent connection attempts during tests
jest.mock('../../config/connect', () => jest.fn().mockResolvedValue(true));

// Disable console for tests
logger.silent = true;

describe('PromptGroup Migration Script', () => {
  let mongoServer;
  let Prompt, PromptGroup, AclEntry, AccessRole, User, Project;
  let migrateToPromptGroupPermissions;
  let testOwner, testProject;
  let ownerRole, viewerRole;

  beforeAll(async () => {
    // Set up MongoDB memory server
    mongoServer = await MongoMemoryServer.create();
    const mongoUri = mongoServer.getUri();
    await mongoose.connect(mongoUri);

    // Initialize models
    const dbModels = require('~/db/models');
    Prompt = dbModels.Prompt;
    PromptGroup = dbModels.PromptGroup;
    AclEntry = dbModels.AclEntry;
    AccessRole = dbModels.AccessRole;
    User = dbModels.User;
    Project = dbModels.Project;

    // Create test user
    testOwner = await User.create({
      name: 'Test Owner',
      email: 'owner@test.com',
      role: 'USER',
    });

    // Create test project with the proper name
    const projectName = Constants.GLOBAL_PROJECT_NAME || 'instance';
    testProject = await Project.create({
      name: projectName,
      description: 'Global project',
      promptGroupIds: [],
    });

    // Create promptGroup access roles
    ownerRole = await AccessRole.create({
      accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
      name: 'Owner',
      description: 'Full control over promptGroups',
      resourceType: ResourceType.PROMPTGROUP,
      permBits:
        PermissionBits.VIEW | PermissionBits.EDIT | PermissionBits.DELETE | PermissionBits.SHARE,
    });

    viewerRole = await AccessRole.create({
      accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
      name: 'Viewer',
      description: 'Can view promptGroups',
      resourceType: ResourceType.PROMPTGROUP,
      permBits: PermissionBits.VIEW,
    });

    await AccessRole.create({
      accessRoleId: AccessRoleIds.PROMPTGROUP_EDITOR,
      name: 'Editor',
      description: 'Can view and edit promptGroups',
      resourceType: ResourceType.PROMPTGROUP,
      permBits: PermissionBits.VIEW | PermissionBits.EDIT,
    });

    // Import migration function
    const migration = require('../../config/migrate-prompt-permissions');
    migrateToPromptGroupPermissions = migration.migrateToPromptGroupPermissions;
  });

  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
  });

  beforeEach(async () => {
    // Clean up before each test
    await Prompt.deleteMany({});
    await PromptGroup.deleteMany({});
    await AclEntry.deleteMany({});
    // Reset the project's promptGroupIds array
    testProject.promptGroupIds = [];
    await testProject.save();
  });

  it('should categorize promptGroups correctly in dry run', async () => {
    // Create global prompt group (in Global project)
    const globalPromptGroup = await PromptGroup.create({
      name: 'Global Group',
      author: testOwner._id,
      authorName: testOwner.name,
      productionId: new ObjectId(),
    });

    // Create private prompt group (not in any project)
    await PromptGroup.create({
      name: 'Private Group',
      author: testOwner._id,
      authorName: testOwner.name,
      productionId: new ObjectId(),
    });

    // Add global group to project's promptGroupIds array
    testProject.promptGroupIds = [globalPromptGroup._id];
    await testProject.save();

    const result = await migrateToPromptGroupPermissions({ dryRun: true });

    expect(result.dryRun).toBe(true);
    expect(result.summary.total).toBe(2);
    expect(result.summary.globalViewAccess).toBe(1);
    expect(result.summary.privateGroups).toBe(1);
  });

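  // The dry-run test above fixes the migration's contract: with dryRun: true
  // it only categorizes groups and returns a summary; with dryRun: false it
  // writes owner and public-view ACL entries. A usage sketch of that contract:
  //
  //   async function runPromptGroupMigration() {
  //     // Preview only: no ACL entries are written.
  //     const preview = await migrateToPromptGroupPermissions({ dryRun: true });
  //     console.log(preview.summary); // { total, globalViewAccess, privateGroups, ... }
  //
  //     // Apply: grants owner entries, plus public-view entries for global groups.
  //     const result = await migrateToPromptGroupPermissions({ dryRun: false });
  //     console.log(result.migrated, result.errors, result.ownerGrants, result.publicViewGrants);
  //   }
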
  it('should grant appropriate permissions during migration', async () => {
    // Create prompt groups
    const globalPromptGroup = await PromptGroup.create({
      name: 'Global Group',
      author: testOwner._id,
      authorName: testOwner.name,
      productionId: new ObjectId(),
    });

    const privatePromptGroup = await PromptGroup.create({
      name: 'Private Group',
      author: testOwner._id,
      authorName: testOwner.name,
      productionId: new ObjectId(),
    });

    // Add global group to project's promptGroupIds array
    testProject.promptGroupIds = [globalPromptGroup._id];
    await testProject.save();

    const result = await migrateToPromptGroupPermissions({ dryRun: false });

    expect(result.migrated).toBe(2);
    expect(result.errors).toBe(0);
    expect(result.ownerGrants).toBe(2);
    expect(result.publicViewGrants).toBe(1);

    // Check global promptGroup permissions
    const globalOwnerEntry = await AclEntry.findOne({
      resourceType: ResourceType.PROMPTGROUP,
      resourceId: globalPromptGroup._id,
      principalType: PrincipalType.USER,
      principalId: testOwner._id,
    });
    expect(globalOwnerEntry).toBeTruthy();
    expect(globalOwnerEntry.permBits).toBe(ownerRole.permBits);

    const globalPublicEntry = await AclEntry.findOne({
      resourceType: ResourceType.PROMPTGROUP,
      resourceId: globalPromptGroup._id,
      principalType: PrincipalType.PUBLIC,
    });
    expect(globalPublicEntry).toBeTruthy();
    expect(globalPublicEntry.permBits).toBe(viewerRole.permBits);

    // Check private promptGroup permissions
    const privateOwnerEntry = await AclEntry.findOne({
      resourceType: ResourceType.PROMPTGROUP,
      resourceId: privatePromptGroup._id,
      principalType: PrincipalType.USER,
      principalId: testOwner._id,
    });
    expect(privateOwnerEntry).toBeTruthy();
    expect(privateOwnerEntry.permBits).toBe(ownerRole.permBits);

    const privatePublicEntry = await AclEntry.findOne({
      resourceType: ResourceType.PROMPTGROUP,
      resourceId: privatePromptGroup._id,
      principalType: PrincipalType.PUBLIC,
    });
    expect(privatePublicEntry).toBeNull();
  });

  it('should skip promptGroups that already have ACL entries', async () => {
    // Create prompt groups
    const promptGroup1 = await PromptGroup.create({
      name: 'Group 1',
      author: testOwner._id,
      authorName: testOwner.name,
      productionId: new ObjectId(),
    });

    const promptGroup2 = await PromptGroup.create({
      name: 'Group 2',
      author: testOwner._id,
      authorName: testOwner.name,
      productionId: new ObjectId(),
    });

    // Grant permission to one promptGroup manually (simulating it already has ACL)
    await AclEntry.create({
      principalType: PrincipalType.USER,
      principalId: testOwner._id,
      principalModel: PrincipalModel.USER,
      resourceType: ResourceType.PROMPTGROUP,
      resourceId: promptGroup1._id,
      permBits: ownerRole.permBits,
      roleId: ownerRole._id,
      grantedBy: testOwner._id,
      grantedAt: new Date(),
    });

    const result = await migrateToPromptGroupPermissions({ dryRun: false });

    // Should only migrate promptGroup2, skip promptGroup1
    expect(result.migrated).toBe(1);
    expect(result.errors).toBe(0);

    // Verify promptGroup2 now has permissions
    const group2Entry = await AclEntry.findOne({
      resourceType: ResourceType.PROMPTGROUP,
      resourceId: promptGroup2._id,
    });
    expect(group2Entry).toBeTruthy();
  });

  it('should handle promptGroups with prompts correctly', async () => {
    // Create a promptGroup with some prompts
    const promptGroup = await PromptGroup.create({
      name: 'Group with Prompts',
      author: testOwner._id,
      authorName: testOwner.name,
      productionId: new ObjectId(),
    });

    // Create some prompts in this group
    await Prompt.create({
      prompt: 'First prompt',
      author: testOwner._id,
      groupId: promptGroup._id,
      type: 'text',
    });

    await Prompt.create({
      prompt: 'Second prompt',
      author: testOwner._id,
      groupId: promptGroup._id,
      type: 'text',
    });

    const result = await migrateToPromptGroupPermissions({ dryRun: false });

    expect(result.migrated).toBe(1);
    expect(result.errors).toBe(0);

    // Verify the promptGroup has permissions
    const groupEntry = await AclEntry.findOne({
      resourceType: ResourceType.PROMPTGROUP,
      resourceId: promptGroup._id,
expect(groupEntry).toBeTruthy();
|
||||
|
||||
// Verify no prompt-level permissions were created
|
||||
const promptEntries = await AclEntry.find({
|
||||
resourceType: 'prompt',
|
||||
});
|
||||
expect(promptEntries).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
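Aside: the `permBits` values in these fixtures are plain bitmasks, so the Editor role is simply the bitwise OR of the view and edit bits. A minimal sketch of the idea — the literal bit values below are assumptions for illustration, not necessarily LibreChat's actual constants:

// Hypothetical bit values, for illustration only.
const PermissionBits = { VIEW: 1, EDIT: 2, DELETE: 4, SHARE: 8 };

const editorBits = PermissionBits.VIEW | PermissionBits.EDIT; // 3

// A grant satisfies a required permission when that bit is set:
const canEdit = (entry) => (entry.permBits & PermissionBits.EDIT) !== 0;
console.log(canEdit({ permBits: editorBits })); // true
console.log(canEdit({ permBits: PermissionBits.VIEW })); // false
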
@@ -16,7 +16,7 @@ const { Role } = require('~/db/models');
 *
 * @param {string} roleName - The name of the role to find or create.
 * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
 * @returns {Promise<Object>} A plain object representing the role document.
 * @returns {Promise<IRole>} Role document.
 */
const getRoleByName = async function (roleName, fieldsToSelect = null) {
  const cache = getLogStores(CacheKeys.ROLES);
@@ -72,8 +72,9 @@ const updateRoleByName = async function (roleName, updates) {
 * Updates access permissions for a specific role and multiple permission types.
 * @param {string} roleName - The role to update.
 * @param {Object.<PermissionTypes, Object.<Permissions, boolean>>} permissionsUpdate - Permissions to update and their values.
 * @param {IRole} [roleData] - Optional role data to use instead of fetching from the database.
 */
async function updateAccessPermissions(roleName, permissionsUpdate) {
async function updateAccessPermissions(roleName, permissionsUpdate, roleData) {
  // Filter and clean the permission updates based on our schema definition.
  const updates = {};
  for (const [permissionType, permissions] of Object.entries(permissionsUpdate)) {
@@ -86,7 +87,7 @@ async function updateAccessPermissions(roleName, permissionsUpdate) {
  }

  try {
    const role = await getRoleByName(roleName);
    const role = roleData ?? (await getRoleByName(roleName));
    if (!role) {
      return;
    }
@@ -113,7 +114,6 @@ async function updateAccessPermissions(roleName, permissionsUpdate) {
    }
  }

  // Process the current updates
  for (const [permissionType, permissions] of Object.entries(updates)) {
    const currentTypePermissions = currentPermissions[permissionType] || {};
    updatedPermissions[permissionType] = { ...currentTypePermissions };

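The new optional `roleData` parameter lets a caller that already holds the role document skip the `getRoleByName` lookup (and its cache round-trip). A hedged usage sketch — the permission-type and permission names below are placeholders, not confirmed schema keys:

// Without roleData: the role is fetched (or read from cache) by name.
await updateAccessPermissions('ADMIN', { PROMPTS: { USE: true } });

// With roleData: the preloaded document is used as-is, avoiding a refetch.
const role = await getRoleByName('ADMIN');
await updateAccessPermissions('ADMIN', { PROMPTS: { USE: true } }, role);
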
@@ -22,6 +22,7 @@ const {
} = require('./Message');
const { getConvoTitle, getConvo, saveConvo, deleteConvos } = require('./Conversation');
const { getPreset, getPresets, savePreset, deletePresets } = require('./Preset');
const { File } = require('~/db/models');

module.exports = {
  ...methods,
@@ -51,4 +52,6 @@ module.exports = {
  getPresets,
  savePreset,
  deletePresets,

  Files: File,
};

@@ -1,4 +1,4 @@
const { matchModelName } = require('../utils');
const { matchModelName } = require('../utils/tokens');
const defaultRate = 6;

/**
@@ -87,6 +87,9 @@ const tokenValues = Object.assign(
    'gpt-4.1': { prompt: 2, completion: 8 },
    'gpt-4.5': { prompt: 75, completion: 150 },
    'gpt-4o-mini': { prompt: 0.15, completion: 0.6 },
    'gpt-5': { prompt: 1.25, completion: 10 },
    'gpt-5-mini': { prompt: 0.25, completion: 2 },
    'gpt-5-nano': { prompt: 0.05, completion: 0.4 },
    'gpt-4o': { prompt: 2.5, completion: 10 },
    'gpt-4o-2024-05-13': { prompt: 5, completion: 15 },
    'gpt-4-1106': { prompt: 10, completion: 30 },
@@ -147,6 +150,9 @@ const tokenValues = Object.assign(
    codestral: { prompt: 0.3, completion: 0.9 },
    'ministral-8b': { prompt: 0.1, completion: 0.1 },
    'ministral-3b': { prompt: 0.04, completion: 0.04 },
    // GPT-OSS models
    'gpt-oss-20b': { prompt: 0.05, completion: 0.2 },
    'gpt-oss-120b': { prompt: 0.15, completion: 0.6 },
  },
  bedrockValues,
);

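Aside: these entries read as per-token rate multipliers, presumably USD per million tokens. If that assumption holds, a request's cost works out as in this sketch (the rate figures are copied from the table above):

// Assumes rates are USD per 1M tokens — an inference, not stated in the diff.
const rates = { 'gpt-5': { prompt: 1.25, completion: 10 } };

function estimateCostUSD(key, promptTokens, completionTokens) {
  const { prompt, completion } = rates[key];
  return (promptTokens / 1e6) * prompt + (completionTokens / 1e6) * completion;
}

// 10k prompt + 2k completion tokens on gpt-5:
console.log(estimateCostUSD('gpt-5', 10_000, 2_000)); // 0.0125 + 0.02 = 0.0325
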
@@ -214,6 +220,12 @@ const getValueKey = (model, endpoint) => {
    return 'gpt-4.1';
  } else if (modelName.includes('gpt-4o-2024-05-13')) {
    return 'gpt-4o-2024-05-13';
  } else if (modelName.includes('gpt-5-nano')) {
    return 'gpt-5-nano';
  } else if (modelName.includes('gpt-5-mini')) {
    return 'gpt-5-mini';
  } else if (modelName.includes('gpt-5')) {
    return 'gpt-5';
  } else if (modelName.includes('gpt-4o-mini')) {
    return 'gpt-4o-mini';
  } else if (modelName.includes('gpt-4o')) {

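Note on the branch order above: `getValueKey` matches by substring, so the more specific keys must be tested first. If the bare `gpt-5` check came before `gpt-5-nano`, a name like 'gpt-5-nano-2025-01-30' would fall into the `gpt-5` branch and be priced at the wrong rate. A tiny sketch of the failure mode:

const name = 'gpt-5-nano-2025-01-30';

// Wrong: the broad pattern shadows the specific one.
if (name.includes('gpt-5')) {
  // matches first -> would return 'gpt-5'
}

// Right (as in the diff): check 'gpt-5-nano' and 'gpt-5-mini' before 'gpt-5'.
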
@@ -25,8 +25,14 @@ describe('getValueKey', () => {
    expect(getValueKey('gpt-4-some-other-info')).toBe('8k');
  });

  it('should return undefined for model names that do not match any known patterns', () => {
    expect(getValueKey('gpt-5-some-other-info')).toBeUndefined();
  it('should return "gpt-5" for model name containing "gpt-5"', () => {
    expect(getValueKey('gpt-5-some-other-info')).toBe('gpt-5');
    expect(getValueKey('gpt-5-2025-01-30')).toBe('gpt-5');
    expect(getValueKey('gpt-5-2025-01-30-0130')).toBe('gpt-5');
    expect(getValueKey('openai/gpt-5')).toBe('gpt-5');
    expect(getValueKey('openai/gpt-5-2025-01-30')).toBe('gpt-5');
    expect(getValueKey('gpt-5-turbo')).toBe('gpt-5');
    expect(getValueKey('gpt-5-0130')).toBe('gpt-5');
  });

  it('should return "gpt-3.5-turbo-1106" for model name containing "gpt-3.5-turbo-1106"', () => {
@@ -84,6 +90,29 @@ describe('getValueKey', () => {
    expect(getValueKey('gpt-4.1-nano-0125')).toBe('gpt-4.1-nano');
  });

  it('should return "gpt-5" for model type of "gpt-5"', () => {
    expect(getValueKey('gpt-5-2025-01-30')).toBe('gpt-5');
    expect(getValueKey('gpt-5-2025-01-30-0130')).toBe('gpt-5');
    expect(getValueKey('openai/gpt-5')).toBe('gpt-5');
    expect(getValueKey('openai/gpt-5-2025-01-30')).toBe('gpt-5');
    expect(getValueKey('gpt-5-turbo')).toBe('gpt-5');
    expect(getValueKey('gpt-5-0130')).toBe('gpt-5');
  });

  it('should return "gpt-5-mini" for model type of "gpt-5-mini"', () => {
    expect(getValueKey('gpt-5-mini-2025-01-30')).toBe('gpt-5-mini');
    expect(getValueKey('openai/gpt-5-mini')).toBe('gpt-5-mini');
    expect(getValueKey('gpt-5-mini-0130')).toBe('gpt-5-mini');
    expect(getValueKey('gpt-5-mini-2025-01-30-0130')).toBe('gpt-5-mini');
  });

  it('should return "gpt-5-nano" for model type of "gpt-5-nano"', () => {
    expect(getValueKey('gpt-5-nano-2025-01-30')).toBe('gpt-5-nano');
    expect(getValueKey('openai/gpt-5-nano')).toBe('gpt-5-nano');
    expect(getValueKey('gpt-5-nano-0130')).toBe('gpt-5-nano');
    expect(getValueKey('gpt-5-nano-2025-01-30-0130')).toBe('gpt-5-nano');
  });

  it('should return "gpt-4o" for model type of "gpt-4o"', () => {
    expect(getValueKey('gpt-4o-2024-08-06')).toBe('gpt-4o');
    expect(getValueKey('gpt-4o-2024-08-06-0718')).toBe('gpt-4o');
@@ -207,6 +236,48 @@ describe('getMultiplier', () => {
    );
  });

  it('should return the correct multiplier for gpt-5', () => {
    const valueKey = getValueKey('gpt-5-2025-01-30');
    expect(getMultiplier({ valueKey, tokenType: 'prompt' })).toBe(tokenValues['gpt-5'].prompt);
    expect(getMultiplier({ valueKey, tokenType: 'completion' })).toBe(
      tokenValues['gpt-5'].completion,
    );
    expect(getMultiplier({ model: 'gpt-5-preview', tokenType: 'prompt' })).toBe(
      tokenValues['gpt-5'].prompt,
    );
    expect(getMultiplier({ model: 'openai/gpt-5', tokenType: 'completion' })).toBe(
      tokenValues['gpt-5'].completion,
    );
  });

  it('should return the correct multiplier for gpt-5-mini', () => {
    const valueKey = getValueKey('gpt-5-mini-2025-01-30');
    expect(getMultiplier({ valueKey, tokenType: 'prompt' })).toBe(tokenValues['gpt-5-mini'].prompt);
    expect(getMultiplier({ valueKey, tokenType: 'completion' })).toBe(
      tokenValues['gpt-5-mini'].completion,
    );
    expect(getMultiplier({ model: 'gpt-5-mini-preview', tokenType: 'prompt' })).toBe(
      tokenValues['gpt-5-mini'].prompt,
    );
    expect(getMultiplier({ model: 'openai/gpt-5-mini', tokenType: 'completion' })).toBe(
      tokenValues['gpt-5-mini'].completion,
    );
  });

  it('should return the correct multiplier for gpt-5-nano', () => {
    const valueKey = getValueKey('gpt-5-nano-2025-01-30');
    expect(getMultiplier({ valueKey, tokenType: 'prompt' })).toBe(tokenValues['gpt-5-nano'].prompt);
    expect(getMultiplier({ valueKey, tokenType: 'completion' })).toBe(
      tokenValues['gpt-5-nano'].completion,
    );
    expect(getMultiplier({ model: 'gpt-5-nano-preview', tokenType: 'prompt' })).toBe(
      tokenValues['gpt-5-nano'].prompt,
    );
    expect(getMultiplier({ model: 'openai/gpt-5-nano', tokenType: 'completion' })).toBe(
      tokenValues['gpt-5-nano'].completion,
    );
  });

  it('should return the correct multiplier for gpt-4o', () => {
    const valueKey = getValueKey('gpt-4o-2024-08-06');
    expect(getMultiplier({ valueKey, tokenType: 'prompt' })).toBe(tokenValues['gpt-4o'].prompt);
@@ -307,10 +378,22 @@ describe('getMultiplier', () => {
  });

  it('should return defaultRate if derived valueKey does not match any known patterns', () => {
    expect(getMultiplier({ tokenType: 'prompt', model: 'gpt-5-some-other-info' })).toBe(
    expect(getMultiplier({ tokenType: 'prompt', model: 'gpt-10-some-other-info' })).toBe(
      defaultRate,
    );
  });

  it('should return correct multipliers for GPT-OSS models', () => {
    const models = ['gpt-oss-20b', 'gpt-oss-120b'];
    models.forEach((key) => {
      const expectedPrompt = tokenValues[key].prompt;
      const expectedCompletion = tokenValues[key].completion;
      expect(getMultiplier({ valueKey: key, tokenType: 'prompt' })).toBe(expectedPrompt);
      expect(getMultiplier({ valueKey: key, tokenType: 'completion' })).toBe(expectedCompletion);
      expect(getMultiplier({ model: key, tokenType: 'prompt' })).toBe(expectedPrompt);
      expect(getMultiplier({ model: key, tokenType: 'completion' })).toBe(expectedCompletion);
    });
  });
});

describe('AWS Bedrock Model Tests', () => {

@@ -1,6 +1,6 @@
{
  "name": "@librechat/backend",
  "version": "v0.7.9-rc1",
  "version": "v0.8.0-rc2",
  "description": "",
  "scripts": {
    "start": "echo 'please run this from the root directory'",
@@ -49,10 +49,11 @@
    "@langchain/google-vertexai": "^0.2.13",
    "@langchain/openai": "^0.5.18",
    "@langchain/textsplitters": "^0.1.0",
    "@librechat/agents": "^2.4.63",
    "@librechat/agents": "^2.4.75",
    "@librechat/api": "*",
    "@librechat/data-schemas": "*",
    "@node-saml/passport-saml": "^5.0.0",
    "@modelcontextprotocol/sdk": "^1.17.1",
    "@node-saml/passport-saml": "^5.1.0",
    "@microsoft/microsoft-graph-client": "^3.0.7",
    "@waylaidwanderer/fetch-event-source": "^3.0.1",
    "axios": "^1.8.2",
@@ -72,6 +73,7 @@
    "express-static-gzip": "^2.2.0",
    "file-type": "^18.7.0",
    "firebase": "^11.0.2",
    "form-data": "^4.0.4",
    "googleapis": "^126.0.1",
    "handlebars": "^4.7.7",
    "https-proxy-agent": "^7.0.6",
@@ -94,7 +96,7 @@
    "node-fetch": "^2.7.0",
    "nodemailer": "^6.9.15",
    "ollama": "^0.5.0",
    "openai": "^4.96.2",
    "openai": "^5.10.1",
    "openai-chat-tokens": "^0.2.8",
    "openid-client": "^6.5.0",
    "passport": "^0.6.0",
@@ -119,7 +121,7 @@
  },
  "devDependencies": {
    "jest": "^29.7.0",
    "mongodb-memory-server": "^10.1.3",
    "mongodb-memory-server": "^10.1.4",
    "nodemon": "^3.0.3",
    "supertest": "^7.1.0"
  }

@@ -12,6 +12,7 @@ const {
} = require('~/server/services/AuthService');
const { findUser, getUserById, deleteAllUserSessions, findSession } = require('~/models');
const { getOpenIdConfig } = require('~/strategies');
const { getGraphApiToken } = require('~/server/services/GraphTokenService');

const registrationController = async (req, res) => {
  try {
@@ -118,9 +119,54 @@ const refreshController = async (req, res) => {
  }
};

const graphTokenController = async (req, res) => {
  try {
    // Validate user is authenticated via Entra ID
    if (!req.user.openidId || req.user.provider !== 'openid') {
      return res.status(403).json({
        message: 'Microsoft Graph access requires Entra ID authentication',
      });
    }

    // Check if OpenID token reuse is active (required for on-behalf-of flow)
    if (!isEnabled(process.env.OPENID_REUSE_TOKENS)) {
      return res.status(403).json({
        message: 'SharePoint integration requires OpenID token reuse to be enabled',
      });
    }

    // Extract access token from Authorization header
    const authHeader = req.headers.authorization;
    if (!authHeader || !authHeader.startsWith('Bearer ')) {
      return res.status(401).json({
        message: 'Valid authorization token required',
      });
    }

    // Get scopes from query parameters
    const scopes = req.query.scopes;
    if (!scopes) {
      return res.status(400).json({
        message: 'Graph API scopes are required as query parameter',
      });
    }

    const accessToken = authHeader.substring(7); // Remove 'Bearer ' prefix
    const tokenResponse = await getGraphApiToken(req.user, accessToken, scopes);

    res.json(tokenResponse);
  } catch (error) {
    logger.error('[graphTokenController] Failed to obtain Graph API token:', error);
    res.status(500).json({
      message: 'Failed to obtain Microsoft Graph token',
    });
  }
};

module.exports = {
  refreshController,
  registrationController,
  resetPasswordController,
  resetPasswordRequestController,
  graphTokenController,
};

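For context, a client calls this controller with its LibreChat access token plus the Graph scopes it wants exchanged through the on-behalf-of flow. A hedged sketch — the `/api/auth/graph-token` path is an assumption, since the route wiring isn't shown in this diff; only the header and query contract comes from the controller above:

// Hypothetical route path; header/query shape taken from graphTokenController.
const res = await fetch('/api/auth/graph-token?scopes=Sites.Read.All', {
  headers: { Authorization: `Bearer ${accessToken}` },
});
if (!res.ok) {
  throw new Error(`Graph token exchange failed: ${res.status}`);
}
const tokenResponse = await res.json();
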
@@ -1,46 +0,0 @@
const { logger } = require('~/config');

//handle duplicates
const handleDuplicateKeyError = (err, res) => {
  logger.error('Duplicate key error:', err.keyValue);
  const field = `${JSON.stringify(Object.keys(err.keyValue))}`;
  const code = 409;
  res
    .status(code)
    .send({ messages: `An document with that ${field} already exists.`, fields: field });
};

//handle validation errors
const handleValidationError = (err, res) => {
  logger.error('Validation error:', err.errors);
  let errors = Object.values(err.errors).map((el) => el.message);
  let fields = `${JSON.stringify(Object.values(err.errors).map((el) => el.path))}`;
  let code = 400;
  if (errors.length > 1) {
    errors = errors.join(' ');
    res.status(code).send({ messages: `${JSON.stringify(errors)}`, fields: fields });
  } else {
    res.status(code).send({ messages: `${JSON.stringify(errors)}`, fields: fields });
  }
};

module.exports = (err, _req, res, _next) => {
  try {
    if (err.name === 'ValidationError') {
      return handleValidationError(err, res);
    }
    if (err.code && err.code == 11000) {
      return handleDuplicateKeyError(err, res);
    }
    // Special handling for errors like SyntaxError
    if (err.statusCode && err.body) {
      return res.status(err.statusCode).send(err.body);
    }

    logger.error('ErrorController => error', err);
    return res.status(500).send('An unknown error occurred.');
  } catch (err) {
    logger.error('ErrorController => processing error', err);
    return res.status(500).send('Processing error in ErrorController.');
  }
};

@@ -5,6 +5,7 @@ const { logger } = require('~/config');

/**
 * @param {ServerRequest} req
 * @returns {Promise<TModelsConfig>} The models config.
 */
const getModelsConfig = async (req) => {
  const cache = getLogStores(CacheKeys.CONFIG_STORE);

@@ -4,12 +4,13 @@

const mongoose = require('mongoose');
const { logger } = require('@librechat/data-schemas');
const { ResourceType, PrincipalType } = require('librechat-data-provider');
const {
  getAvailableRoles,
  ensurePrincipalExists,
  getEffectivePermissions,
  ensureGroupPrincipalExists,
  bulkUpdateResourcePermissions,
  ensureGroupPrincipalExists,
  getEffectivePermissions,
  ensurePrincipalExists,
  getAvailableRoles,
} = require('~/server/services/PermissionService');
const { AclEntry } = require('~/db/models');
const {
@@ -18,8 +19,8 @@ const {
  calculateRelevanceScore,
} = require('~/models');
const {
  searchEntraIdPrincipals,
  entraIdPrincipalFeatureEnabled,
  searchEntraIdPrincipals,
} = require('~/server/services/GraphApiService');

/**
@@ -27,6 +28,18 @@ const {
 * Delegates validation and logic to PermissionService
 */

/**
 * Validates that the resourceType is one of the supported enum values
 * @param {string} resourceType - The resource type to validate
 * @throws {Error} If resourceType is not valid
 */
const validateResourceType = (resourceType) => {
  const validTypes = Object.values(ResourceType);
  if (!validTypes.includes(resourceType)) {
    throw new Error(`Invalid resourceType: ${resourceType}. Valid types: ${validTypes.join(', ')}`);
  }
};

/**
 * Bulk update permissions for a resource (grant, update, remove)
 * @route PUT /api/{resourceType}/{resourceId}/permissions
@@ -41,6 +54,8 @@ const {
const updateResourcePermissions = async (req, res) => {
  try {
    const { resourceType, resourceId } = req.params;
    validateResourceType(resourceType);

    /** @type {TUpdateResourcePermissionsRequest} */
    const { updated, removed, public: isPublic, publicAccessRoleId } = req.body;
    const { id: userId } = req.user;
@@ -57,7 +72,7 @@ const updateResourcePermissions = async (req, res) => {
    // Add public permission if enabled
    if (isPublic && publicAccessRoleId) {
      updatedPrincipals.push({
        type: 'public',
        type: PrincipalType.PUBLIC,
        id: null,
        accessRoleId: publicAccessRoleId,
      });
@@ -82,11 +97,13 @@ const updateResourcePermissions = async (req, res) => {
      try {
        let principalId;

        if (principal.type === 'public') {
        if (principal.type === PrincipalType.PUBLIC) {
          principalId = null; // Public principals don't need database records
        } else if (principal.type === 'user') {
        } else if (principal.type === PrincipalType.ROLE) {
          principalId = principal.id; // Role principals use role name as ID
        } else if (principal.type === PrincipalType.USER) {
          principalId = await ensurePrincipalExists(principal);
        } else if (principal.type === 'group') {
        } else if (principal.type === PrincipalType.GROUP) {
          // Pass authContext to enable member fetching for Entra ID groups when available
          principalId = await ensureGroupPrincipalExists(principal, authContext);
        } else {
@@ -122,7 +139,7 @@ const updateResourcePermissions = async (req, res) => {
    // If public is disabled, add public to revoked list
    if (!isPublic) {
      revokedPrincipals.push({
        type: 'public',
        type: PrincipalType.PUBLIC,
        id: null,
      });
    }
@@ -163,6 +180,7 @@ const updateResourcePermissions = async (req, res) => {
const getResourcePermissions = async (req, res) => {
  try {
    const { resourceType, resourceId } = req.params;
    validateResourceType(resourceType);

    // Use aggregation pipeline for efficient single-query data retrieval
    const results = await AclEntry.aggregate([
@@ -219,14 +237,14 @@ const getResourcePermissions = async (req, res) => {

    // Process aggregation results
    for (const result of results) {
      if (result.principalType === 'public') {
      if (result.principalType === PrincipalType.PUBLIC) {
        publicPermission = {
          public: true,
          publicAccessRoleId: result.accessRoleId,
        };
      } else if (result.principalType === 'user' && result.userInfo) {
      } else if (result.principalType === PrincipalType.USER && result.userInfo) {
        principals.push({
          type: 'user',
          type: PrincipalType.USER,
          id: result.userInfo._id.toString(),
          name: result.userInfo.name || result.userInfo.username,
          email: result.userInfo.email,
@@ -235,9 +253,9 @@ const getResourcePermissions = async (req, res) => {
          idOnTheSource: result.userInfo.idOnTheSource || result.userInfo._id.toString(),
          accessRoleId: result.accessRoleId,
        });
      } else if (result.principalType === 'group' && result.groupInfo) {
      } else if (result.principalType === PrincipalType.GROUP && result.groupInfo) {
        principals.push({
          type: 'group',
          type: PrincipalType.GROUP,
          id: result.groupInfo._id.toString(),
          name: result.groupInfo.name,
          email: result.groupInfo.email,
@@ -247,6 +265,16 @@ const getResourcePermissions = async (req, res) => {
          idOnTheSource: result.groupInfo.idOnTheSource || result.groupInfo._id.toString(),
          accessRoleId: result.accessRoleId,
        });
      } else if (result.principalType === PrincipalType.ROLE) {
        principals.push({
          type: PrincipalType.ROLE,
          /** Role name as ID */
          id: result.principalId,
          /** Display the role name */
          name: result.principalId,
          description: `System role: ${result.principalId}`,
          accessRoleId: result.accessRoleId,
        });
      }
    }

@@ -278,6 +306,7 @@ const getResourcePermissions = async (req, res) => {
const getResourceRoles = async (req, res) => {
  try {
    const { resourceType } = req.params;
    validateResourceType(resourceType);

    const roles = await getAvailableRoles({ resourceType });

@@ -305,10 +334,13 @@ const getResourceRoles = async (req, res) => {
const getUserEffectivePermissions = async (req, res) => {
  try {
    const { resourceType, resourceId } = req.params;
    validateResourceType(resourceType);

    const { id: userId } = req.user;

    const permissionBits = await getEffectivePermissions({
      userId,
      role: req.user.role,
      resourceType,
      resourceId,
    });
@@ -347,7 +379,9 @@ const searchPrincipals = async (req, res) => {
    }

    const searchLimit = Math.min(Math.max(1, parseInt(limit) || 10), 50);
    const typeFilter = ['user', 'group'].includes(type) ? type : null;
    const typeFilter = [PrincipalType.USER, PrincipalType.GROUP, PrincipalType.ROLE].includes(type)
      ? type
      : null;

    const localResults = await searchLocalPrincipals(query.trim(), searchLimit, typeFilter);
    let allPrincipals = [...localResults];

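From the destructuring in `updateResourcePermissions`, the PUT body carries `updated`, `removed`, `public`, and `publicAccessRoleId`. A hedged example payload — the IDs and access-role strings are purely illustrative; real values come from the access-role definitions seeded earlier:

// PUT /api/{resourceType}/{resourceId}/permissions
const body = {
  updated: [
    { type: 'user', id: '665f0c...', accessRoleId: 'promptGroup_editor' },   // illustrative
    { type: 'group', id: '6660a1...', accessRoleId: 'promptGroup_viewer' },  // illustrative
  ],
  removed: [{ type: 'user', id: '6661b2...' }],
  public: true,
  publicAccessRoleId: 'promptGroup_viewer',
};
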
@@ -1,54 +1,16 @@
const { logger } = require('@librechat/data-schemas');
const { CacheKeys, AuthType, Constants } = require('librechat-data-provider');
const { CacheKeys, Constants } = require('librechat-data-provider');
const {
  getToolkitKey,
  checkPluginAuth,
  filterUniquePlugins,
  convertMCPToolsToPlugins,
} = require('@librechat/api');
const { getCustomConfig, getCachedTools } = require('~/server/services/Config');
const { getToolkitKey } = require('~/server/services/ToolService');
const { availableTools, toolkits } = require('~/app/clients/tools');
const { getMCPManager, getFlowStateManager } = require('~/config');
const { availableTools } = require('~/app/clients/tools');
const { getLogStores } = require('~/cache');

/**
 * Filters out duplicate plugins from the list of plugins.
 *
 * @param {TPlugin[]} plugins The list of plugins to filter.
 * @returns {TPlugin[]} The list of plugins with duplicates removed.
 */
const filterUniquePlugins = (plugins) => {
  const seen = new Set();
  return plugins.filter((plugin) => {
    const duplicate = seen.has(plugin.pluginKey);
    seen.add(plugin.pluginKey);
    return !duplicate;
  });
};

/**
 * Determines if a plugin is authenticated by checking if all required authentication fields have non-empty values.
 * Supports alternate authentication fields, allowing validation against multiple possible environment variables.
 *
 * @param {TPlugin} plugin The plugin object containing the authentication configuration.
 * @returns {boolean} True if the plugin is authenticated for all required fields, false otherwise.
 */
const checkPluginAuth = (plugin) => {
  if (!plugin.authConfig || plugin.authConfig.length === 0) {
    return false;
  }

  return plugin.authConfig.every((authFieldObj) => {
    const authFieldOptions = authFieldObj.authField.split('||');
    let isFieldAuthenticated = false;

    for (const fieldOption of authFieldOptions) {
      const envValue = process.env[fieldOption];
      if (envValue && envValue.trim() !== '' && envValue !== AuthType.USER_PROVIDED) {
        isFieldAuthenticated = true;
        break;
      }
    }

    return isFieldAuthenticated;
  });
};

const getAvailablePluginsController = async (req, res) => {
  try {
    const cache = getLogStores(CacheKeys.CONFIG_STORE);
@@ -138,15 +100,21 @@ function createGetServerTools() {
 */
const getAvailableTools = async (req, res) => {
  try {
    const userId = req.user?.id;
    const customConfig = await getCustomConfig();
    const cache = getLogStores(CacheKeys.CONFIG_STORE);
    const cachedToolsArray = await cache.get(CacheKeys.TOOLS);
    if (cachedToolsArray) {
      res.status(200).json(cachedToolsArray);
    const cachedUserTools = await getCachedTools({ userId });
    const userPlugins = convertMCPToolsToPlugins({ functionTools: cachedUserTools, customConfig });

    if (cachedToolsArray != null && userPlugins != null) {
      const dedupedTools = filterUniquePlugins([...userPlugins, ...cachedToolsArray]);
      res.status(200).json(dedupedTools);
      return;
    }

    // If not in cache, build from manifest
    let pluginManifest = availableTools;
    const customConfig = await getCustomConfig();
    if (customConfig?.mcpServers != null) {
      const mcpManager = getMCPManager();
      const flowsCache = getLogStores(CacheKeys.FLOWS);
@@ -179,7 +147,9 @@ const getAvailableTools = async (req, res) => {
      const isToolDefined = toolDefinitions[plugin.pluginKey] !== undefined;
      const isToolkit =
        plugin.toolkit === true &&
        Object.keys(toolDefinitions).some((key) => getToolkitKey(key) === plugin.pluginKey);
        Object.keys(toolDefinitions).some(
          (key) => getToolkitKey({ toolkits, toolName: key }) === plugin.pluginKey,
        );

      if (!isToolDefined && !isToolkit) {
        continue;
@@ -217,10 +187,12 @@ const getAvailableTools = async (req, res) => {

      toolsOutput.push(toolToAdd);
    }

    const finalTools = filterUniquePlugins(toolsOutput);
    await cache.set(CacheKeys.TOOLS, finalTools);
    res.status(200).json(finalTools);

    const dedupedTools = filterUniquePlugins([...userPlugins, ...finalTools]);

    res.status(200).json(dedupedTools);
  } catch (error) {
    logger.error('[getAvailableTools]', error);
    res.status(500).json({ message: error.message });

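The reworked `getAvailableTools` merges two sources on every request: the globally cached manifest tools under `CacheKeys.TOOLS` and the per-user MCP tools from `getCachedTools({ userId })`, deduplicated by `pluginKey`. Roughly, as a sketch condensed from the diff above:

// Fast path, condensed; not a drop-in implementation.
const manifest = await cache.get(CacheKeys.TOOLS);  // shared across users
const userTools = await getCachedTools({ userId }); // per-user MCP tools
const userPlugins = convertMCPToolsToPlugins({ functionTools: userTools, customConfig });
if (manifest != null && userPlugins != null) {
  // Assuming the helper keeps the first occurrence (as the removed local
  // filterUniquePlugins did), user tools win on duplicate pluginKeys.
  return filterUniquePlugins([...userPlugins, ...manifest]);
}
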
api/server/controllers/PluginController.spec.js (new file, 520 lines)
@@ -0,0 +1,520 @@
const { Constants } = require('librechat-data-provider');
const { getCustomConfig, getCachedTools } = require('~/server/services/Config');
const { getLogStores } = require('~/cache');

// Mock the dependencies
jest.mock('@librechat/data-schemas', () => ({
  logger: {
    debug: jest.fn(),
    error: jest.fn(),
  },
}));

jest.mock('~/server/services/Config', () => ({
  getCustomConfig: jest.fn(),
  getCachedTools: jest.fn(),
}));

jest.mock('~/server/services/ToolService', () => ({
  getToolkitKey: jest.fn(),
}));

jest.mock('~/config', () => ({
  getMCPManager: jest.fn(() => ({
    loadManifestTools: jest.fn().mockResolvedValue([]),
  })),
  getFlowStateManager: jest.fn(),
}));

jest.mock('~/app/clients/tools', () => ({
  availableTools: [],
  toolkits: [],
}));

jest.mock('~/cache', () => ({
  getLogStores: jest.fn(),
}));

jest.mock('@librechat/api', () => ({
  getToolkitKey: jest.fn(),
  checkPluginAuth: jest.fn(),
  filterUniquePlugins: jest.fn(),
  convertMCPToolsToPlugins: jest.fn(),
}));

// Import the actual module with the function we want to test
const { getAvailableTools, getAvailablePluginsController } = require('./PluginController');
const {
  filterUniquePlugins,
  checkPluginAuth,
  convertMCPToolsToPlugins,
  getToolkitKey,
} = require('@librechat/api');

describe('PluginController', () => {
  let mockReq, mockRes, mockCache;

  beforeEach(() => {
    jest.clearAllMocks();
    mockReq = { user: { id: 'test-user-id' } };
    mockRes = { status: jest.fn().mockReturnThis(), json: jest.fn() };
    mockCache = { get: jest.fn(), set: jest.fn() };
    getLogStores.mockReturnValue(mockCache);
  });

  describe('getAvailablePluginsController', () => {
    beforeEach(() => {
      mockReq.app = { locals: { filteredTools: [], includedTools: [] } };
    });

    it('should use filterUniquePlugins to remove duplicate plugins', async () => {
      const mockPlugins = [
        { name: 'Plugin1', pluginKey: 'key1', description: 'First' },
        { name: 'Plugin2', pluginKey: 'key2', description: 'Second' },
      ];

      mockCache.get.mockResolvedValue(null);
      filterUniquePlugins.mockReturnValue(mockPlugins);
      checkPluginAuth.mockReturnValue(true);

      await getAvailablePluginsController(mockReq, mockRes);

      expect(filterUniquePlugins).toHaveBeenCalled();
      expect(mockRes.status).toHaveBeenCalledWith(200);
      // The response includes authenticated: true for each plugin when checkPluginAuth returns true
      expect(mockRes.json).toHaveBeenCalledWith([
        { name: 'Plugin1', pluginKey: 'key1', description: 'First', authenticated: true },
        { name: 'Plugin2', pluginKey: 'key2', description: 'Second', authenticated: true },
      ]);
    });

    it('should use checkPluginAuth to verify plugin authentication', async () => {
      const mockPlugin = { name: 'Plugin1', pluginKey: 'key1', description: 'First' };

      mockCache.get.mockResolvedValue(null);
      filterUniquePlugins.mockReturnValue([mockPlugin]);
      checkPluginAuth.mockReturnValueOnce(true);

      await getAvailablePluginsController(mockReq, mockRes);

      expect(checkPluginAuth).toHaveBeenCalledWith(mockPlugin);
      const responseData = mockRes.json.mock.calls[0][0];
      expect(responseData[0].authenticated).toBe(true);
    });

    it('should return cached plugins when available', async () => {
      const cachedPlugins = [
        { name: 'CachedPlugin', pluginKey: 'cached', description: 'Cached plugin' },
      ];

      mockCache.get.mockResolvedValue(cachedPlugins);

      await getAvailablePluginsController(mockReq, mockRes);

      expect(filterUniquePlugins).not.toHaveBeenCalled();
      expect(checkPluginAuth).not.toHaveBeenCalled();
      expect(mockRes.json).toHaveBeenCalledWith(cachedPlugins);
    });

    it('should filter plugins based on includedTools', async () => {
      const mockPlugins = [
        { name: 'Plugin1', pluginKey: 'key1', description: 'First' },
        { name: 'Plugin2', pluginKey: 'key2', description: 'Second' },
      ];

      mockReq.app.locals.includedTools = ['key1'];
      mockCache.get.mockResolvedValue(null);
      filterUniquePlugins.mockReturnValue(mockPlugins);
      checkPluginAuth.mockReturnValue(false);

      await getAvailablePluginsController(mockReq, mockRes);

      const responseData = mockRes.json.mock.calls[0][0];
      expect(responseData).toHaveLength(1);
      expect(responseData[0].pluginKey).toBe('key1');
    });
  });

  describe('getAvailableTools', () => {
    it('should use convertMCPToolsToPlugins for user-specific MCP tools', async () => {
      const mockUserTools = {
        [`tool1${Constants.mcp_delimiter}server1`]: {
          function: { name: 'tool1', description: 'Tool 1' },
        },
      };
      const mockConvertedPlugins = [
        {
          name: 'tool1',
          pluginKey: `tool1${Constants.mcp_delimiter}server1`,
          description: 'Tool 1',
        },
      ];

      mockCache.get.mockResolvedValue(null);
      getCachedTools.mockResolvedValueOnce(mockUserTools);
      convertMCPToolsToPlugins.mockReturnValue(mockConvertedPlugins);
      filterUniquePlugins.mockImplementation((plugins) => plugins);
      getCustomConfig.mockResolvedValue(null);

      await getAvailableTools(mockReq, mockRes);

      expect(convertMCPToolsToPlugins).toHaveBeenCalledWith({
        functionTools: mockUserTools,
        customConfig: null,
      });
    });

    it('should use filterUniquePlugins to deduplicate combined tools', async () => {
      const mockUserPlugins = [
        { name: 'UserTool', pluginKey: 'user-tool', description: 'User tool' },
      ];
      const mockManifestPlugins = [
        { name: 'ManifestTool', pluginKey: 'manifest-tool', description: 'Manifest tool' },
      ];

      mockCache.get.mockResolvedValue(mockManifestPlugins);
      getCachedTools.mockResolvedValueOnce({});
      convertMCPToolsToPlugins.mockReturnValue(mockUserPlugins);
      filterUniquePlugins.mockReturnValue([...mockUserPlugins, ...mockManifestPlugins]);
      getCustomConfig.mockResolvedValue(null);

      await getAvailableTools(mockReq, mockRes);

      // Should be called to deduplicate the combined array
      expect(filterUniquePlugins).toHaveBeenLastCalledWith([
        ...mockUserPlugins,
        ...mockManifestPlugins,
      ]);
    });

    it('should use checkPluginAuth to verify authentication status', async () => {
      const mockPlugin = { name: 'Tool1', pluginKey: 'tool1', description: 'Tool 1' };

      mockCache.get.mockResolvedValue(null);
      getCachedTools.mockResolvedValue({});
      convertMCPToolsToPlugins.mockReturnValue([]);
      filterUniquePlugins.mockReturnValue([mockPlugin]);
      checkPluginAuth.mockReturnValue(true);
      getCustomConfig.mockResolvedValue(null);

      // Mock getCachedTools second call to return tool definitions
      getCachedTools.mockResolvedValueOnce({}).mockResolvedValueOnce({ tool1: true });

      await getAvailableTools(mockReq, mockRes);

      expect(checkPluginAuth).toHaveBeenCalledWith(mockPlugin);
    });

    it('should use getToolkitKey for toolkit validation', async () => {
      const mockToolkit = {
        name: 'Toolkit1',
        pluginKey: 'toolkit1',
        description: 'Toolkit 1',
        toolkit: true,
      };

      mockCache.get.mockResolvedValue(null);
      getCachedTools.mockResolvedValue({});
      convertMCPToolsToPlugins.mockReturnValue([]);
      filterUniquePlugins.mockReturnValue([mockToolkit]);
      checkPluginAuth.mockReturnValue(false);
      getToolkitKey.mockReturnValue('toolkit1');
      getCustomConfig.mockResolvedValue(null);

      // Mock getCachedTools second call to return tool definitions
      getCachedTools.mockResolvedValueOnce({}).mockResolvedValueOnce({
        toolkit1_function: true,
      });

      await getAvailableTools(mockReq, mockRes);

      expect(getToolkitKey).toHaveBeenCalled();
    });
  });

  describe('plugin.icon behavior', () => {
    const callGetAvailableToolsWithMCPServer = async (mcpServers) => {
      mockCache.get.mockResolvedValue(null);
      getCustomConfig.mockResolvedValue({ mcpServers });

      const functionTools = {
        [`test-tool${Constants.mcp_delimiter}test-server`]: {
          function: { name: 'test-tool', description: 'A test tool' },
        },
      };

      const mockConvertedPlugin = {
        name: 'test-tool',
        pluginKey: `test-tool${Constants.mcp_delimiter}test-server`,
        description: 'A test tool',
        icon: mcpServers['test-server']?.iconPath,
        authenticated: true,
        authConfig: [],
      };

      getCachedTools.mockResolvedValueOnce(functionTools);
      convertMCPToolsToPlugins.mockReturnValue([mockConvertedPlugin]);
      filterUniquePlugins.mockImplementation((plugins) => plugins);
      checkPluginAuth.mockReturnValue(true);
      getToolkitKey.mockReturnValue(undefined);

      getCachedTools.mockResolvedValueOnce({
        [`test-tool${Constants.mcp_delimiter}test-server`]: true,
      });

      await getAvailableTools(mockReq, mockRes);
      const responseData = mockRes.json.mock.calls[0][0];
      return responseData.find((tool) => tool.name === 'test-tool');
    };

    it('should set plugin.icon when iconPath is defined', async () => {
      const mcpServers = {
        'test-server': {
          iconPath: '/path/to/icon.png',
        },
      };
      const testTool = await callGetAvailableToolsWithMCPServer(mcpServers);
      expect(testTool.icon).toBe('/path/to/icon.png');
    });

    it('should set plugin.icon to undefined when iconPath is not defined', async () => {
      const mcpServers = {
        'test-server': {},
      };
      const testTool = await callGetAvailableToolsWithMCPServer(mcpServers);
      expect(testTool.icon).toBeUndefined();
    });
  });

  describe('helper function integration', () => {
    it('should properly handle MCP tools with custom user variables', async () => {
      const customConfig = {
        mcpServers: {
          'test-server': {
            customUserVars: {
              API_KEY: { title: 'API Key', description: 'Your API key' },
            },
          },
        },
      };

      // We need to test the actual flow where MCP manager tools are included
      const mcpManagerTools = [
        {
          name: 'tool1',
          pluginKey: `tool1${Constants.mcp_delimiter}test-server`,
          description: 'Tool 1',
          authenticated: true,
        },
      ];

      // Mock the MCP manager to return tools
      const mockMCPManager = {
        loadManifestTools: jest.fn().mockResolvedValue(mcpManagerTools),
      };
      require('~/config').getMCPManager.mockReturnValue(mockMCPManager);

      mockCache.get.mockResolvedValue(null);
      getCustomConfig.mockResolvedValue(customConfig);

      // First call returns user tools (empty in this case)
      getCachedTools.mockResolvedValueOnce({});

      // Mock convertMCPToolsToPlugins to return empty array for user tools
      convertMCPToolsToPlugins.mockReturnValue([]);

      // Mock filterUniquePlugins to pass through
      filterUniquePlugins.mockImplementation((plugins) => plugins || []);

      // Mock checkPluginAuth
      checkPluginAuth.mockReturnValue(true);

      // Second call returns tool definitions
      getCachedTools.mockResolvedValueOnce({
        [`tool1${Constants.mcp_delimiter}test-server`]: true,
      });

      await getAvailableTools(mockReq, mockRes);

      const responseData = mockRes.json.mock.calls[0][0];

      // Find the MCP tool in the response
      const mcpTool = responseData.find(
        (tool) => tool.pluginKey === `tool1${Constants.mcp_delimiter}test-server`,
      );

      // The actual implementation adds authConfig and sets authenticated to false when customUserVars exist
      expect(mcpTool).toBeDefined();
      expect(mcpTool.authConfig).toEqual([
        { authField: 'API_KEY', label: 'API Key', description: 'Your API key' },
      ]);
      expect(mcpTool.authenticated).toBe(false);
    });

    it('should handle error cases gracefully', async () => {
      mockCache.get.mockRejectedValue(new Error('Cache error'));

      await getAvailableTools(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(500);
      expect(mockRes.json).toHaveBeenCalledWith({ message: 'Cache error' });
    });
  });

  describe('edge cases with undefined/null values', () => {
    it('should handle undefined cache gracefully', async () => {
      getLogStores.mockReturnValue(undefined);

      await getAvailableTools(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(500);
    });

    it('should handle null cachedTools and cachedUserTools', async () => {
      mockCache.get.mockResolvedValue(null);
      getCachedTools.mockResolvedValue(null);
      convertMCPToolsToPlugins.mockReturnValue(undefined);
      filterUniquePlugins.mockImplementation((plugins) => plugins || []);
      getCustomConfig.mockResolvedValue(null);

      await getAvailableTools(mockReq, mockRes);

      expect(convertMCPToolsToPlugins).toHaveBeenCalledWith({
        functionTools: null,
        customConfig: null,
      });
    });

    it('should handle when getCachedTools returns undefined', async () => {
      mockCache.get.mockResolvedValue(null);
      getCachedTools.mockResolvedValue(undefined);
      convertMCPToolsToPlugins.mockReturnValue(undefined);
      filterUniquePlugins.mockImplementation((plugins) => plugins || []);
      getCustomConfig.mockResolvedValue(null);
      checkPluginAuth.mockReturnValue(false);

      // Mock getCachedTools to return undefined for both calls
      getCachedTools.mockReset();
      getCachedTools.mockResolvedValueOnce(undefined).mockResolvedValueOnce(undefined);

      await getAvailableTools(mockReq, mockRes);

      expect(convertMCPToolsToPlugins).toHaveBeenCalledWith({
        functionTools: undefined,
        customConfig: null,
      });
    });

    it('should handle cachedToolsArray and userPlugins both being defined', async () => {
      const cachedTools = [{ name: 'CachedTool', pluginKey: 'cached-tool', description: 'Cached' }];
      const userTools = {
        'user-tool': { function: { name: 'user-tool', description: 'User tool' } },
      };
      const userPlugins = [{ name: 'UserTool', pluginKey: 'user-tool', description: 'User tool' }];

      mockCache.get.mockResolvedValue(cachedTools);
      getCachedTools.mockResolvedValue(userTools);
      convertMCPToolsToPlugins.mockReturnValue(userPlugins);
      filterUniquePlugins.mockReturnValue([...userPlugins, ...cachedTools]);

      await getAvailableTools(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(200);
      expect(mockRes.json).toHaveBeenCalledWith([...userPlugins, ...cachedTools]);
    });

    it('should handle empty toolDefinitions object', async () => {
      mockCache.get.mockResolvedValue(null);
      getCachedTools.mockResolvedValueOnce({}).mockResolvedValueOnce({});
      convertMCPToolsToPlugins.mockReturnValue([]);
      filterUniquePlugins.mockImplementation((plugins) => plugins || []);
      getCustomConfig.mockResolvedValue(null);
      checkPluginAuth.mockReturnValue(true);

      await getAvailableTools(mockReq, mockRes);

      // With empty tool definitions, no tools should be in the final output
      expect(mockRes.json).toHaveBeenCalledWith([]);
    });

    it('should handle MCP tools without customUserVars', async () => {
      const customConfig = {
        mcpServers: {
          'test-server': {
            // No customUserVars defined
          },
        },
      };

      const mockUserTools = {
        [`tool1${Constants.mcp_delimiter}test-server`]: {
          function: { name: 'tool1', description: 'Tool 1' },
        },
      };

      mockCache.get.mockResolvedValue(null);
      getCustomConfig.mockResolvedValue(customConfig);
      getCachedTools.mockResolvedValueOnce(mockUserTools);

      const mockPlugin = {
        name: 'tool1',
        pluginKey: `tool1${Constants.mcp_delimiter}test-server`,
        description: 'Tool 1',
        authenticated: true,
        authConfig: [],
      };

      convertMCPToolsToPlugins.mockReturnValue([mockPlugin]);
      filterUniquePlugins.mockImplementation((plugins) => plugins);
      checkPluginAuth.mockReturnValue(true);

      getCachedTools.mockResolvedValueOnce({
        [`tool1${Constants.mcp_delimiter}test-server`]: true,
      });

      await getAvailableTools(mockReq, mockRes);

      const responseData = mockRes.json.mock.calls[0][0];
      expect(responseData[0].authenticated).toBe(true);
      // The actual implementation doesn't set authConfig on tools without customUserVars
      expect(responseData[0].authConfig).toEqual([]);
    });

    it('should handle req.app.locals with undefined filteredTools and includedTools', async () => {
      mockReq.app = { locals: {} };
      mockCache.get.mockResolvedValue(null);
      filterUniquePlugins.mockReturnValue([]);
      checkPluginAuth.mockReturnValue(false);

      await getAvailablePluginsController(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(200);
      expect(mockRes.json).toHaveBeenCalledWith([]);
    });

    it('should handle toolkit with undefined toolDefinitions keys', async () => {
      const mockToolkit = {
        name: 'Toolkit1',
        pluginKey: 'toolkit1',
        description: 'Toolkit 1',
        toolkit: true,
      };

      mockCache.get.mockResolvedValue(null);
      getCachedTools.mockResolvedValue({});
      convertMCPToolsToPlugins.mockReturnValue([]);
      filterUniquePlugins.mockReturnValue([mockToolkit]);
      checkPluginAuth.mockReturnValue(false);
      getToolkitKey.mockReturnValue(undefined);
      getCustomConfig.mockResolvedValue(null);

      // Mock getCachedTools second call to return null
      getCachedTools.mockResolvedValueOnce({}).mockResolvedValueOnce(null);

      await getAvailableTools(mockReq, mockRes);

      // Should handle null toolDefinitions gracefully
      expect(mockRes.status).toHaveBeenCalledWith(200);
    });
  });
});

@@ -99,10 +99,36 @@ const confirm2FA = async (req, res) => {

/**
 * Disable 2FA by clearing the stored secret and backup codes.
 * Requires verification with either TOTP token or backup code if 2FA is fully enabled.
 */
const disable2FA = async (req, res) => {
  try {
    const userId = req.user.id;
    const { token, backupCode } = req.body;
    const user = await getUserById(userId);

    if (!user || !user.totpSecret) {
      return res.status(400).json({ message: '2FA is not setup for this user' });
    }

    if (user.twoFactorEnabled) {
      const secret = await getTOTPSecret(user.totpSecret);
      let isVerified = false;

      if (token) {
        isVerified = await verifyTOTP(secret, token);
      } else if (backupCode) {
        isVerified = await verifyBackupCode({ user, backupCode });
      } else {
        return res
          .status(400)
          .json({ message: 'Either token or backup code is required to disable 2FA' });
      }

      if (!isVerified) {
        return res.status(401).json({ message: 'Invalid token or backup code' });
      }
    }
    await updateUser(userId, { totpSecret: null, backupCodes: [], twoFactorEnabled: false });
    return res.status(200).json();
  } catch (err) {

@@ -1,5 +1,5 @@
const { logger } = require('@librechat/data-schemas');
const { webSearchKeys, extractWebSearchEnvVars } = require('@librechat/api');
const { webSearchKeys, extractWebSearchEnvVars, normalizeHttpError } = require('@librechat/api');
const {
  getFiles,
  updateUser,
@@ -89,8 +89,8 @@ const updateUserPluginsController = async (req, res) => {

      if (userPluginsService instanceof Error) {
        logger.error('[userPluginsService]', userPluginsService);
        const { status, message } = userPluginsService;
        res.status(status).send({ message });
        const { status, message } = normalizeHttpError(userPluginsService);
        return res.status(status).send({ message });
      }
    }

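`normalizeHttpError` comes from `@librechat/api` and its implementation isn't shown in this diff; the call sites only rely on it returning usable `{ status, message }` values even when the raw error carries neither. A plausible shape, offered purely as an assumption inferred from those call sites:

// Assumed behavior only — not the actual @librechat/api implementation.
function normalizeHttpError(err, fallbackStatus = 500) {
  const status = Number.isInteger(err?.status) ? err.status : fallbackStatus;
  const message =
    typeof err?.message === 'string' && err.message !== ''
      ? err.message
      : 'Something went wrong.';
  return { status, message };
}
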
@@ -137,7 +137,7 @@ const updateUserPluginsController = async (req, res) => {
        authService = await updateUserPluginAuth(user.id, keys[i], pluginKey, values[i]);
        if (authService instanceof Error) {
          logger.error('[authService]', authService);
          ({ status, message } = authService);
          ({ status, message } = normalizeHttpError(authService));
        }
      }
    } else if (action === 'uninstall') {
@@ -151,7 +151,7 @@ const updateUserPluginsController = async (req, res) => {
          `[authService] Error deleting all auth for MCP tool ${pluginKey}:`,
          authService,
        );
        ({ status, message } = authService);
        ({ status, message } = normalizeHttpError(authService));
      }
    } else {
      // This handles:
@@ -163,7 +163,7 @@ const updateUserPluginsController = async (req, res) => {
        authService = await deleteUserPluginAuth(user.id, keys[i]); // Deletes by authField name
        if (authService instanceof Error) {
          logger.error('[authService] Error deleting specific auth key:', authService);
          ({ status, message } = authService);
          ({ status, message } = normalizeHttpError(authService));
        }
      }
    }
@@ -175,14 +175,16 @@ const updateUserPluginsController = async (req, res) => {
    try {
      const mcpManager = getMCPManager(user.id);
      if (mcpManager) {
        // Extract server name from pluginKey (format: "mcp_<serverName>")
        const serverName = pluginKey.replace(Constants.mcp_prefix, '');
        logger.info(
          `[updateUserPluginsController] Disconnecting MCP connections for user ${user.id} after plugin auth update for ${pluginKey}.`,
          `[updateUserPluginsController] Disconnecting MCP server ${serverName} for user ${user.id} after plugin auth update for ${pluginKey}.`,
        );
        await mcpManager.disconnectUserConnections(user.id);
        await mcpManager.disconnectUserConnection(user.id, serverName);
      }
    } catch (disconnectError) {
      logger.error(
        `[updateUserPluginsController] Error disconnecting MCP connections for user ${user.id} after plugin auth update:`,
        `[updateUserPluginsController] Error disconnecting MCP connection for user ${user.id} after plugin auth update:`,
        disconnectError,
      );
      // Do not fail the request for this, but log it.
@@ -191,7 +193,8 @@ const updateUserPluginsController = async (req, res) => {
      return res.status(status).send();
    }

    res.status(status).send({ message });
    const normalized = normalizeHttpError({ status, message });
    return res.status(normalized.status).send({ message: normalized.message });
  } catch (err) {
    logger.error('[updateUserPluginsController]', err);
    return res.status(500).json({ message: 'Something went wrong.' });

@@ -11,6 +11,7 @@ const {
  handleToolCalls,
  ChatModelStreamHandler,
} = require('@librechat/agents');
const { processFileCitations } = require('~/server/services/Files/Citations');
const { processCodeOutput } = require('~/server/services/Files/Code/process');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { saveBase64Image } = require('~/server/services/Files/process');
@@ -238,6 +239,31 @@ function createToolEndCallback({ req, res, artifactPromises }) {
        return;
      }

      if (output.artifact[Tools.file_search]) {
        artifactPromises.push(
          (async () => {
            const user = req.user;
            const attachment = await processFileCitations({
              user,
              metadata,
              toolArtifact: output.artifact,
              toolCallId: output.tool_call_id,
            });
            if (!attachment) {
              return null;
            }
            if (!res.headersSent) {
              return attachment;
            }
            res.write(`event: attachment\ndata: ${JSON.stringify(attachment)}\n\n`);
            return attachment;
          })().catch((error) => {
            logger.error('Error processing file citations:', error);
            return null;
          }),
        );
      }

      if (output.artifact[Tools.web_search]) {
        artifactPromises.push(
          (async () => {

@@ -15,6 +15,7 @@ const {
    Callback,
    Providers,
    GraphEvents,
+   TitleMethod,
    formatMessage,
    formatAgentMessages,
    getTokenCountForMessage,

@@ -70,7 +71,11 @@ const payloadParser = ({ req, agent, endpoint }) => {
    if (isAgentsEndpoint(endpoint)) {
      return { model: undefined };
    } else if (endpoint === EModelEndpoint.bedrock) {
-     return bedrockInputSchema.parse(agent.model_parameters);
+     const parsedValues = bedrockInputSchema.parse(agent.model_parameters);
+     if (parsedValues.thinking == null) {
+       parsedValues.thinking = false;
+     }
+     return parsedValues;
    }
    return req.body.endpointOption.model_parameters;
  };
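
The Bedrock branch now pins `thinking` to an explicit `false` when the parsed parameters leave it null or undefined. The same default could instead live in the schema itself; a minimal sketch, assuming a Zod object schema (the field names here are illustrative only, not the real `bedrockInputSchema`):

const { z } = require('zod');

// Hypothetical standalone illustration of the same defaulting behavior:
const exampleSchema = z.object({
  model: z.string(),
  thinking: z
    .boolean()
    .nullish()
    .transform((value) => value ?? false),
});

exampleSchema.parse({ model: 'us.anthropic.claude-3-5-sonnet' });
// => { model: 'us.anthropic.claude-3-5-sonnet', thinking: false }

The post-parse null check in the diff is the lighter-touch option, since it avoids changing a schema that other call sites share.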

@@ -397,6 +402,34 @@ class AgentClient extends BaseClient {
      return result;
    }

+   /**
+    * Creates a promise that resolves with the memory promise result or undefined after a timeout
+    * @param {Promise<(TAttachment | null)[] | undefined>} memoryPromise - The memory promise to await
+    * @param {number} timeoutMs - Timeout in milliseconds (default: 3000)
+    * @returns {Promise<(TAttachment | null)[] | undefined>}
+    */
+   async awaitMemoryWithTimeout(memoryPromise, timeoutMs = 3000) {
+     if (!memoryPromise) {
+       return;
+     }
+
+     try {
+       const timeoutPromise = new Promise((_, reject) =>
+         setTimeout(() => reject(new Error('Memory processing timeout')), timeoutMs),
+       );
+
+       const attachments = await Promise.race([memoryPromise, timeoutPromise]);
+       return attachments;
+     } catch (error) {
+       if (error.message === 'Memory processing timeout') {
+         logger.warn('[AgentClient] Memory processing timed out after 3 seconds');
+       } else {
+         logger.error('[AgentClient] Error processing memory:', error);
+       }
+       return;
+     }
+   }
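
Two small caveats with the race above: the rejection timer is never cleared once the memory promise wins, and the warning hardcodes "3 seconds" even though `timeoutMs` is configurable. A variant sketch of the same pattern with timer cleanup (not what ships here, just the general technique):

async function raceWithTimeout(promise, timeoutMs = 3000) {
  let timer;
  const timeout = new Promise((_, reject) => {
    timer = setTimeout(() => reject(new Error('timeout')), timeoutMs);
  });
  try {
    // Resolves with the promise result, or rejects after timeoutMs.
    return await Promise.race([promise, timeout]);
  } finally {
    // Always release the timer so it cannot keep the event loop alive.
    clearTimeout(timer);
  }
}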

@@ -507,6 +540,39 @@ class AgentClient extends BaseClient {
      return withoutKeys;
    }

+   /**
+    * Filters out image URLs from message content
+    * @param {BaseMessage} message - The message to filter
+    * @returns {BaseMessage} - A new message with image URLs removed
+    */
+   filterImageUrls(message) {
+     if (!message.content || typeof message.content === 'string') {
+       return message;
+     }
+
+     if (Array.isArray(message.content)) {
+       const filteredContent = message.content.filter(
+         (part) => part.type !== ContentTypes.IMAGE_URL,
+       );
+
+       if (filteredContent.length === 1 && filteredContent[0].type === ContentTypes.TEXT) {
+         const MessageClass = message.constructor;
+         return new MessageClass({
+           content: filteredContent[0].text,
+           additional_kwargs: message.additional_kwargs,
+         });
+       }
+
+       const MessageClass = message.constructor;
+       return new MessageClass({
+         content: filteredContent,
+         additional_kwargs: message.additional_kwargs,
+       });
+     }
+
+     return message;
+   }

    /**
     * @param {BaseMessage[]} messages
     * @returns {Promise<void | (TAttachment | null)[]>}

@@ -535,7 +601,8 @@ class AgentClient extends BaseClient {
        }
      }

-     const bufferString = getBufferString(messagesToProcess);
+     const filteredMessages = messagesToProcess.map((msg) => this.filterImageUrls(msg));
+     const bufferString = getBufferString(filteredMessages);
      const bufferMessage = new HumanMessage(`# Current Chat:\n\n${bufferString}`);
      return await this.processMemory([bufferMessage]);
    } catch (error) {

@@ -730,6 +797,9 @@ class AgentClient extends BaseClient {
        if (i > 0) {
          this.model = agent.model_parameters.model;
        }
+       if (i > 0 && config.signal == null) {
+         config.signal = abortController.signal;
+       }
        if (agent.recursion_limit && typeof agent.recursion_limit === 'number') {
          config.recursionLimit = agent.recursion_limit;
        }

@@ -768,7 +838,7 @@ class AgentClient extends BaseClient {

      if (noSystemMessages === true && systemContent?.length) {
        const latestMessageContent = _messages.pop().content;
-       if (typeof latestMessage !== 'string') {
+       if (typeof latestMessageContent !== 'string') {
          latestMessageContent[0].text = [systemContent, latestMessageContent[0].text].join('\n');
          _messages.push(new HumanMessage({ content: latestMessageContent }));
        } else {

@@ -960,12 +1030,11 @@ class AgentClient extends BaseClient {
      });

      try {
-       if (memoryPromise) {
-         const attachments = await memoryPromise;
-         if (attachments && attachments.length > 0) {
-           this.artifactPromises.push(...attachments);
-         }
-       }
+       const attachments = await this.awaitMemoryWithTimeout(memoryPromise);
+       if (attachments && attachments.length > 0) {
+         this.artifactPromises.push(...attachments);
+       }

        await this.recordCollectedUsage({ context: 'message' });
      } catch (err) {
        logger.error(

@@ -974,11 +1043,9 @@ class AgentClient extends BaseClient {
        );
      }
    } catch (err) {
-     if (memoryPromise) {
-       const attachments = await memoryPromise;
-       if (attachments && attachments.length > 0) {
-         this.artifactPromises.push(...attachments);
-       }
-     }
+     const attachments = await this.awaitMemoryWithTimeout(memoryPromise);
+     if (attachments && attachments.length > 0) {
+       this.artifactPromises.push(...attachments);
+     }
      logger.error(
        '[api/server/controllers/agents/client.js #sendCompletion] Operation aborted',

@@ -1009,25 +1076,40 @@ class AgentClient extends BaseClient {
    }
    const { handleLLMEnd, collected: collectedMetadata } = createMetadataAggregator();
    const { req, res, agent } = this.options;
-   const endpoint = agent.endpoint;
+   let endpoint = agent.endpoint;

    /** @type {import('@librechat/agents').ClientOptions} */
    let clientOptions = {
      maxTokens: 75,
-     model: agent.model_parameters.model,
+     model: agent.model || agent.model_parameters.model,
    };

-   const { getOptions, overrideProvider, customEndpointConfig } =
-     await getProviderConfig(endpoint);
+   let titleProviderConfig = await getProviderConfig(endpoint);

    /** @type {TEndpoint | undefined} */
-   const endpointConfig = req.app.locals[endpoint] ?? customEndpointConfig;
+   const endpointConfig =
+     req.app.locals.all ?? req.app.locals[endpoint] ?? titleProviderConfig.customEndpointConfig;
    if (!endpointConfig) {
      logger.warn(
        '[api/server/controllers/agents/client.js #titleConvo] Error getting endpoint config',
      );
    }

+   if (endpointConfig?.titleEndpoint && endpointConfig.titleEndpoint !== endpoint) {
+     try {
+       titleProviderConfig = await getProviderConfig(endpointConfig.titleEndpoint);
+       endpoint = endpointConfig.titleEndpoint;
+     } catch (error) {
+       logger.warn(
+         `[api/server/controllers/agents/client.js #titleConvo] Error getting title endpoint config for ${endpointConfig.titleEndpoint}, falling back to default`,
+         error,
+       );
+       // Fall back to original provider config
+       endpoint = agent.endpoint;
+       titleProviderConfig = await getProviderConfig(endpoint);
+     }
+   }

    if (
      endpointConfig &&
      endpointConfig.titleModel &&

@@ -1036,7 +1118,7 @@ class AgentClient extends BaseClient {
      clientOptions.model = endpointConfig.titleModel;
    }

-   const options = await getOptions({
+   const options = await titleProviderConfig.getOptions({
      req,
      res,
      optionsOnly: true,

@@ -1045,7 +1127,7 @@ class AgentClient extends BaseClient {
      endpointOption: { model_parameters: clientOptions },
    });

-   let provider = options.provider ?? overrideProvider ?? agent.provider;
+   let provider = options.provider ?? titleProviderConfig.overrideProvider ?? agent.provider;
    if (
      endpoint === EModelEndpoint.azureOpenAI &&
      options.llmConfig?.azureOpenAIApiInstanceName == null

@@ -1065,11 +1147,16 @@ class AgentClient extends BaseClient {
      clientOptions.configuration = options.configOptions;
    }

-   // Ensure maxTokens is set for non-o1 models
-   if (!/\b(o\d)\b/i.test(clientOptions.model) && !clientOptions.maxTokens) {
-     clientOptions.maxTokens = 75;
-   } else if (/\b(o\d)\b/i.test(clientOptions.model) && clientOptions.maxTokens != null) {
+   const shouldRemoveMaxTokens = /\b(o\d|gpt-[5-9])\b/i.test(clientOptions.model);
+   if (shouldRemoveMaxTokens && clientOptions.maxTokens != null) {
      delete clientOptions.maxTokens;
+   } else if (!shouldRemoveMaxTokens && !clientOptions.maxTokens) {
+     clientOptions.maxTokens = 75;
    }
+   if (shouldRemoveMaxTokens && clientOptions?.modelKwargs?.max_completion_tokens != null) {
+     delete clientOptions.modelKwargs.max_completion_tokens;
+   } else if (shouldRemoveMaxTokens && clientOptions?.modelKwargs?.max_output_tokens != null) {
+     delete clientOptions.modelKwargs.max_output_tokens;
+   }

    clientOptions = Object.assign(

@@ -1078,16 +1165,23 @@ class AgentClient extends BaseClient {
      ),
    );

-   if (provider === Providers.GOOGLE) {
+   if (
+     provider === Providers.GOOGLE &&
+     (endpointConfig?.titleMethod === TitleMethod.FUNCTIONS ||
+       endpointConfig?.titleMethod === TitleMethod.STRUCTURED)
+   ) {
      clientOptions.json = true;
    }

    try {
      const titleResult = await this.run.generateTitle({
        provider,
-       clientOptions,
        inputText: text,
        contentParts: this.contentParts,
+       clientOptions,
+       titleMethod: endpointConfig?.titleMethod,
+       titlePrompt: endpointConfig?.titlePrompt,
+       titlePromptTemplate: endpointConfig?.titlePromptTemplate,
        chainOptions: {
          signal: abortController.signal,
          callbacks: [
api/server/controllers/agents/client.test.js (new file, 1190 lines)
File diff suppressed because it is too large
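
A side note on the `shouldRemoveMaxTokens` gate introduced in `titleConvo` above (and exercised by the suppressed test file): the behavior hinges on one regex. A few worked cases, runnable as-is:

const shouldRemove = (model) => /\b(o\d|gpt-[5-9])\b/i.test(model);

shouldRemove('o1-preview');    // true  ("o1" matches o\d on word boundaries)
shouldRemove('o3-mini');       // true
shouldRemove('gpt-5-mini');    // true  (matches gpt-[5-9])
shouldRemove('gpt-4o');        // false ("4o" is not "o" followed by a digit)
shouldRemove('gpt-3.5-turbo'); // false

Models matching the pattern appear to be the reasoning-style families that do not accept the legacy `maxTokens` parameter, so the title call strips it (along with the related `modelKwargs` token caps) instead of setting the 75-token default.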
@@ -105,8 +105,6 @@ const createErrorHandler = ({ req, res, getContext, originPath = '/assistants/ch
      return res.end();
    }
    await cache.delete(cacheKey);
-   // const cancelledRun = await openai.beta.threads.runs.cancel(thread_id, run_id);
-   // logger.debug(`[${originPath}] Cancelled run:`, cancelledRun);
  } catch (error) {
    logger.error(`[${originPath}] Error cancelling run`, error);
  }

@@ -115,7 +113,6 @@ const createErrorHandler = ({ req, res, getContext, originPath = '/assistants/ch

  let run;
  try {
-   // run = await openai.beta.threads.runs.retrieve(thread_id, run_id);
    await recordUsage({
      ...run.usage,
      model: run.model,

@@ -128,18 +125,9 @@ const createErrorHandler = ({ req, res, getContext, originPath = '/assistants/ch

  let finalEvent;
  try {
-   // const errorContentPart = {
-   //   text: {
-   //     value:
-   //       error?.message ?? 'There was an error processing your request. Please try again later.',
-   //   },
-   //   type: ContentTypes.ERROR,
-   // };
-
    finalEvent = {
      final: true,
      conversation: await getConvo(req.user.id, conversationId),
      // runMessages,
    };
  } catch (error) {
    logger.error(`[${originPath}] Error finalizing error process`, error);

@@ -233,6 +233,26 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
        );
      }
    }
+   // Edge case: sendMessage completed but abort happened during sendCompletion
+   // We need to ensure a final event is sent
+   else if (!res.headersSent && !res.finished) {
+     logger.debug(
+       '[AgentController] Handling edge case: `sendMessage` completed but aborted during `sendCompletion`',
+     );
+
+     const finalResponse = { ...response };
+     finalResponse.error = true;
+
+     sendEvent(res, {
+       final: true,
+       conversation,
+       title: conversation.title,
+       requestMessage: userMessage,
+       responseMessage: finalResponse,
+       error: { message: 'Request was aborted during completion' },
+     });
+     res.end();
+   }

    // Save user message if needed
    if (!client.skipSaveUserMessage) {
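
On the final event above: `sendEvent` is the server-sent-events helper used throughout these controllers. A minimal sketch of what such a helper does (the real one lives in the shared utilities; this shape is an assumption for illustration):

function sendEvent(res, data) {
  // Each SSE frame is "event: <name>\n" + "data: <json>\n\n".
  // A frame without an explicit event name arrives as the default "message" event.
  if (res.writableEnded) {
    return;
  }
  res.write(`event: message\ndata: ${JSON.stringify(data)}\n\n`);
}

Sending a `final: true` frame before `res.end()` lets the client close the stream cleanly instead of treating the abort as a transport failure.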
@@ -1,33 +1,38 @@
  const { z } = require('zod');
  const fs = require('fs').promises;
  const { nanoid } = require('nanoid');
- const { logger, PermissionBits } = require('@librechat/data-schemas');
+ const { logger } = require('@librechat/data-schemas');
  const { agentCreateSchema, agentUpdateSchema } = require('@librechat/api');
  const {
    Tools,
    SystemRoles,
    FileSources,
+   ResourceType,
+   AccessRoleIds,
+   PrincipalType,
    EToolResources,
+   PermissionBits,
    actionDelimiter,
    removeNullishValues,
  } = require('librechat-data-provider');
  const {
-   getAgent,
-   createAgent,
-   updateAgent,
-   deleteAgent,
    getListAgentsByAccess,
    countPromotedAgents,
    revertAgentVersion,
+   createAgent,
+   updateAgent,
+   deleteAgent,
+   getAgent,
  } = require('~/models/Agent');
  const {
-   grantPermission,
-   findAccessibleResources,
    findPubliclyAccessibleResources,
+   findAccessibleResources,
    hasPublicPermission,
+   grantPermission,
  } = require('~/server/services/PermissionService');
  const { getStrategyFunctions } = require('~/server/services/Files/strategies');
  const { resizeAvatar } = require('~/server/services/Files/images/avatar');
+ const { getFileStrategy } = require('~/server/utils/getFileStrategy');
  const { refreshS3Url } = require('~/server/services/Files/S3/crud');
  const { filterFile } = require('~/server/services/Files/process');
  const { updateAction, getActions } = require('~/models/Action');

@@ -47,7 +52,7 @@ const systemTools = {
   * @param {ServerRequest} req - The request object.
   * @param {AgentCreateParams} req.body - The request body.
   * @param {ServerResponse} res - The response object.
-  * @returns {Agent} 201 - success response - application/json
+  * @returns {Promise<Agent>} 201 - success response - application/json
   */
  const createAgentHandler = async (req, res) => {
    try {

@@ -76,11 +81,11 @@ const createAgentHandler = async (req, res) => {
    // Automatically grant owner permissions to the creator
    try {
      await grantPermission({
-       principalType: 'user',
+       principalType: PrincipalType.USER,
        principalId: userId,
-       resourceType: 'agent',
+       resourceType: ResourceType.AGENT,
        resourceId: agent._id,
-       accessRoleId: 'agent_owner',
+       accessRoleId: AccessRoleIds.AGENT_OWNER,
        grantedBy: userId,
      });
      logger.debug(

@@ -145,7 +150,7 @@ const getAgentHandler = async (req, res, expandProperties = false) => {

    // Check if agent is public
    const isPublic = await hasPublicPermission({
-     resourceType: 'agent',
+     resourceType: ResourceType.AGENT,
      resourceId: agent._id,
      requiredPermissions: PermissionBits.VIEW,
    });

@@ -212,6 +217,9 @@ const updateAgentHandler = async (req, res) => {
      })
    : existingAgent;

+   // Add version count to the response
+   updatedAgent.version = updatedAgent.versions ? updatedAgent.versions.length : 0;
+
    if (updatedAgent.author) {
      updatedAgent.author = updatedAgent.author.toString();
    }

@@ -339,11 +347,11 @@ const duplicateAgentHandler = async (req, res) => {
    // Automatically grant owner permissions to the duplicator
    try {
      await grantPermission({
-       principalType: 'user',
+       principalType: PrincipalType.USER,
        principalId: userId,
-       resourceType: 'agent',
+       resourceType: ResourceType.AGENT,
        resourceId: newAgent._id,
-       accessRoleId: 'agent_owner',
+       accessRoleId: AccessRoleIds.AGENT_OWNER,
        grantedBy: userId,
      });
      logger.debug(

@@ -436,11 +444,12 @@ const getListAgentsHandler = async (req, res) => {
    // Get agent IDs the user has VIEW access to via ACL
    const accessibleIds = await findAccessibleResources({
      userId,
-     resourceType: 'agent',
+     role: req.user.role,
+     resourceType: ResourceType.AGENT,
      requiredPermissions: requiredPermission,
    });
    const publiclyAccessibleIds = await findPubliclyAccessibleResources({
-     resourceType: 'agent',
+     resourceType: ResourceType.AGENT,
      requiredPermissions: PermissionBits.VIEW,
    });
    // Use the new ACL-aware function

@@ -491,7 +500,7 @@ const uploadAgentAvatarHandler = async (req, res) => {
      return res.status(404).json({ error: 'Agent not found' });
    }

-   const isAuthor = existingAgent.author.toString() === req.user.id;
+   const isAuthor = existingAgent.author.toString() === req.user.id.toString();
    const hasEditPermission = existingAgent.isCollaborative || isAdmin || isAuthor;

    if (!hasEditPermission) {

@@ -502,7 +511,7 @@ const uploadAgentAvatarHandler = async (req, res) => {

    const buffer = await fs.readFile(req.file.path);

-   const fileStrategy = req.app.locals.fileStrategy;
+   const fileStrategy = getFileStrategy(req.app.locals, { isAvatar: true });

    const resizedBuffer = await resizeAvatar({
      userId: req.user.id,

@@ -599,7 +608,7 @@ const revertAgentVersionHandler = async (req, res) => {
      return res.status(404).json({ error: 'Agent not found' });
    }

-   const isAuthor = existingAgent.author.toString() === req.user.id;
+   const isAuthor = existingAgent.author.toString() === req.user.id.toString();
    const hasEditPermission = existingAgent.isCollaborative || isAdmin || isAuthor;

    if (!hasEditPermission) {
@@ -1,5 +1,6 @@
  const mongoose = require('mongoose');
  const { v4: uuidv4 } = require('uuid');
+ const { nanoid } = require('nanoid');
  const { MongoMemoryServer } = require('mongodb-memory-server');
  const { agentSchema } = require('@librechat/data-schemas');

@@ -41,7 +42,27 @@ jest.mock('~/models/File', () => ({
    deleteFileByFilter: jest.fn(),
  }));

- const { createAgent: createAgentHandler, updateAgent: updateAgentHandler } = require('./v1');
+ jest.mock('~/server/services/PermissionService', () => ({
+   findAccessibleResources: jest.fn().mockResolvedValue([]),
+   findPubliclyAccessibleResources: jest.fn().mockResolvedValue([]),
+   grantPermission: jest.fn(),
+   hasPublicPermission: jest.fn().mockResolvedValue(false),
+ }));
+
+ jest.mock('~/models', () => ({
+   getCategoriesWithCounts: jest.fn(),
+ }));
+
+ const {
+   createAgent: createAgentHandler,
+   updateAgent: updateAgentHandler,
+   getListAgents: getListAgentsHandler,
+ } = require('./v1');
+
+ const {
+   findAccessibleResources,
+   findPubliclyAccessibleResources,
+ } = require('~/server/services/PermissionService');

  /**
   * @type {import('mongoose').Model<import('@librechat/data-schemas').IAgent>}

@@ -79,6 +100,7 @@ describe('Agent Controllers - Mass Assignment Protection', () => {
      },
      body: {},
      params: {},
+     query: {},
      app: {
        locals: {
          fileStrategy: 'local',

@@ -235,6 +257,81 @@ describe('Agent Controllers - Mass Assignment Protection', () => {
      expect(agentInDb.tool_resources.invalid_resource).toBeUndefined();
    });

+   test('should handle support_contact with empty strings', async () => {
+     const dataWithEmptyContact = {
+       provider: 'openai',
+       model: 'gpt-4',
+       name: 'Agent with Empty Contact',
+       support_contact: {
+         name: '',
+         email: '',
+       },
+     };
+
+     mockReq.body = dataWithEmptyContact;
+
+     await createAgentHandler(mockReq, mockRes);
+
+     expect(mockRes.status).toHaveBeenCalledWith(201);
+
+     const createdAgent = mockRes.json.mock.calls[0][0];
+     expect(createdAgent.name).toBe('Agent with Empty Contact');
+     expect(createdAgent.support_contact).toBeDefined();
+     expect(createdAgent.support_contact.name).toBe('');
+     expect(createdAgent.support_contact.email).toBe('');
+   });
+
+   test('should handle support_contact with valid email', async () => {
+     const dataWithValidContact = {
+       provider: 'openai',
+       model: 'gpt-4',
+       name: 'Agent with Valid Contact',
+       support_contact: {
+         name: 'Support Team',
+         email: 'support@example.com',
+       },
+     };
+
+     mockReq.body = dataWithValidContact;
+
+     await createAgentHandler(mockReq, mockRes);
+
+     expect(mockRes.status).toHaveBeenCalledWith(201);
+
+     const createdAgent = mockRes.json.mock.calls[0][0];
+     expect(createdAgent.support_contact).toBeDefined();
+     expect(createdAgent.support_contact.name).toBe('Support Team');
+     expect(createdAgent.support_contact.email).toBe('support@example.com');
+   });
+
+   test('should reject support_contact with invalid email', async () => {
+     const dataWithInvalidEmail = {
+       provider: 'openai',
+       model: 'gpt-4',
+       name: 'Agent with Invalid Email',
+       support_contact: {
+         name: 'Support',
+         email: 'not-an-email',
+       },
+     };
+
+     mockReq.body = dataWithInvalidEmail;
+
+     await createAgentHandler(mockReq, mockRes);
+
+     expect(mockRes.status).toHaveBeenCalledWith(400);
+     expect(mockRes.json).toHaveBeenCalledWith(
+       expect.objectContaining({
+         error: 'Invalid request data',
+         details: expect.arrayContaining([
+           expect.objectContaining({
+             path: ['support_contact', 'email'],
+           }),
+         ]),
+       }),
+     );
+   });
+
    test('should handle avatar validation', async () => {
      const dataWithAvatar = {
        provider: 'openai',

@@ -452,6 +549,28 @@ describe('Agent Controllers - Mass Assignment Protection', () => {
      expect(mockRes.json).toHaveBeenCalledWith({ error: 'Agent not found' });
    });

+   test('should include version field in update response', async () => {
+     mockReq.user.id = existingAgentAuthorId.toString();
+     mockReq.params.id = existingAgentId;
+     mockReq.body = {
+       name: 'Updated with Version Check',
+     };
+
+     await updateAgentHandler(mockReq, mockRes);
+
+     expect(mockRes.json).toHaveBeenCalled();
+     const updatedAgent = mockRes.json.mock.calls[0][0];
+
+     // Verify version field is included and is a number
+     expect(updatedAgent).toHaveProperty('version');
+     expect(typeof updatedAgent.version).toBe('number');
+     expect(updatedAgent.version).toBeGreaterThanOrEqual(1);
+
+     // Verify in database
+     const agentInDb = await Agent.findOne({ id: existingAgentId });
+     expect(updatedAgent.version).toBe(agentInDb.versions.length);
+   });
+
    test('should handle validation errors properly', async () => {
      mockReq.user.id = existingAgentAuthorId.toString();
      mockReq.params.id = existingAgentId;

@@ -571,4 +690,373 @@ describe('Agent Controllers - Mass Assignment Protection', () => {
      expect(agentInDb.futureFeature).toBeUndefined();
    });
  });

+ describe('getListAgentsHandler - Security Tests', () => {
+   let userA, userB;
+   let agentA1, agentA2, agentA3, agentB1;
+
+   beforeEach(async () => {
+     await Agent.deleteMany({});
+     jest.clearAllMocks();
+
+     // Create two test users
+     userA = new mongoose.Types.ObjectId();
+     userB = new mongoose.Types.ObjectId();
+
+     // Create agents for User A
+     agentA1 = await Agent.create({
+       id: `agent_${nanoid(12)}`,
+       name: 'Agent A1',
+       description: 'User A agent 1',
+       provider: 'openai',
+       model: 'gpt-4',
+       author: userA,
+       versions: [
+         {
+           name: 'Agent A1',
+           description: 'User A agent 1',
+           provider: 'openai',
+           model: 'gpt-4',
+           createdAt: new Date(),
+           updatedAt: new Date(),
+         },
+       ],
+     });
+
+     agentA2 = await Agent.create({
+       id: `agent_${nanoid(12)}`,
+       name: 'Agent A2',
+       description: 'User A agent 2',
+       provider: 'openai',
+       model: 'gpt-4',
+       author: userA,
+       versions: [
+         {
+           name: 'Agent A2',
+           description: 'User A agent 2',
+           provider: 'openai',
+           model: 'gpt-4',
+           createdAt: new Date(),
+           updatedAt: new Date(),
+         },
+       ],
+     });
+
+     agentA3 = await Agent.create({
+       id: `agent_${nanoid(12)}`,
+       name: 'Agent A3',
+       description: 'User A agent 3',
+       provider: 'openai',
+       model: 'gpt-4',
+       author: userA,
+       category: 'productivity',
+       versions: [
+         {
+           name: 'Agent A3',
+           description: 'User A agent 3',
+           provider: 'openai',
+           model: 'gpt-4',
+           category: 'productivity',
+           createdAt: new Date(),
+           updatedAt: new Date(),
+         },
+       ],
+     });
+
+     // Create an agent for User B
+     agentB1 = await Agent.create({
+       id: `agent_${nanoid(12)}`,
+       name: 'Agent B1',
+       description: 'User B agent 1',
+       provider: 'openai',
+       model: 'gpt-4',
+       author: userB,
+       versions: [
+         {
+           name: 'Agent B1',
+           description: 'User B agent 1',
+           provider: 'openai',
+           model: 'gpt-4',
+           createdAt: new Date(),
+           updatedAt: new Date(),
+         },
+       ],
+     });
+   });
+
+   test('should return empty list when user has no accessible agents', async () => {
+     // User B has no permissions and no owned agents
+     mockReq.user.id = userB.toString();
+     findAccessibleResources.mockResolvedValue([]);
+     findPubliclyAccessibleResources.mockResolvedValue([]);
+
+     await getListAgentsHandler(mockReq, mockRes);
+
+     expect(findAccessibleResources).toHaveBeenCalledWith({
+       userId: userB.toString(),
+       role: 'USER',
+       resourceType: 'agent',
+       requiredPermissions: 1, // VIEW permission
+     });
+
+     expect(mockRes.json).toHaveBeenCalledWith({
+       object: 'list',
+       data: [],
+       first_id: null,
+       last_id: null,
+       has_more: false,
+       after: null,
+     });
+   });
+
+   test('should not return other users agents when accessibleIds is empty', async () => {
+     // User B trying to see agents with no permissions
+     mockReq.user.id = userB.toString();
+     findAccessibleResources.mockResolvedValue([]);
+     findPubliclyAccessibleResources.mockResolvedValue([]);
+
+     await getListAgentsHandler(mockReq, mockRes);
+
+     const response = mockRes.json.mock.calls[0][0];
+     expect(response.data).toHaveLength(0);
+
+     // Verify User A's agents are not included
+     const agentIds = response.data.map((a) => a.id);
+     expect(agentIds).not.toContain(agentA1.id);
+     expect(agentIds).not.toContain(agentA2.id);
+     expect(agentIds).not.toContain(agentA3.id);
+   });
+
+   test('should only return agents user has access to', async () => {
+     // User B has access to one of User A's agents
+     mockReq.user.id = userB.toString();
+     findAccessibleResources.mockResolvedValue([agentA1._id]);
+     findPubliclyAccessibleResources.mockResolvedValue([]);
+
+     await getListAgentsHandler(mockReq, mockRes);
+
+     const response = mockRes.json.mock.calls[0][0];
+     expect(response.data).toHaveLength(1);
+     expect(response.data[0].id).toBe(agentA1.id);
+     expect(response.data[0].name).toBe('Agent A1');
+   });
+
+   test('should return multiple accessible agents', async () => {
+     // User B has access to multiple agents
+     mockReq.user.id = userB.toString();
+     findAccessibleResources.mockResolvedValue([agentA1._id, agentA3._id, agentB1._id]);
+     findPubliclyAccessibleResources.mockResolvedValue([]);
+
+     await getListAgentsHandler(mockReq, mockRes);
+
+     const response = mockRes.json.mock.calls[0][0];
+     expect(response.data).toHaveLength(3);
+
+     const agentIds = response.data.map((a) => a.id);
+     expect(agentIds).toContain(agentA1.id);
+     expect(agentIds).toContain(agentA3.id);
+     expect(agentIds).toContain(agentB1.id);
+     expect(agentIds).not.toContain(agentA2.id);
+   });
+
+   test('should apply category filter correctly with ACL', async () => {
+     // User has access to all agents but filters by category
+     mockReq.user.id = userB.toString();
+     mockReq.query.category = 'productivity';
+     findAccessibleResources.mockResolvedValue([agentA1._id, agentA2._id, agentA3._id]);
+     findPubliclyAccessibleResources.mockResolvedValue([]);
+
+     await getListAgentsHandler(mockReq, mockRes);
+
+     const response = mockRes.json.mock.calls[0][0];
+     expect(response.data).toHaveLength(1);
+     expect(response.data[0].id).toBe(agentA3.id);
+     expect(response.data[0].category).toBe('productivity');
+   });
+
+   test('should apply search filter correctly with ACL', async () => {
+     // User has access to multiple agents but searches for specific one
+     mockReq.user.id = userB.toString();
+     mockReq.query.search = 'A2';
+     findAccessibleResources.mockResolvedValue([agentA1._id, agentA2._id, agentA3._id]);
+     findPubliclyAccessibleResources.mockResolvedValue([]);
+
+     await getListAgentsHandler(mockReq, mockRes);
+
+     const response = mockRes.json.mock.calls[0][0];
+     expect(response.data).toHaveLength(1);
+     expect(response.data[0].id).toBe(agentA2.id);
+   });
+
+   test('should handle pagination with ACL filtering', async () => {
+     // Create more agents for pagination testing
+     const moreAgents = [];
+     for (let i = 4; i <= 10; i++) {
+       const agent = await Agent.create({
+         id: `agent_${nanoid(12)}`,
+         name: `Agent A${i}`,
+         description: `User A agent ${i}`,
+         provider: 'openai',
+         model: 'gpt-4',
+         author: userA,
+         versions: [
+           {
+             name: `Agent A${i}`,
+             description: `User A agent ${i}`,
+             provider: 'openai',
+             model: 'gpt-4',
+             createdAt: new Date(),
+             updatedAt: new Date(),
+           },
+         ],
+       });
+       moreAgents.push(agent);
+     }
+
+     // User has access to all agents
+     const allAgentIds = [agentA1, agentA2, agentA3, ...moreAgents].map((a) => a._id);
+     mockReq.user.id = userB.toString();
+     mockReq.query.limit = '5';
+     findAccessibleResources.mockResolvedValue(allAgentIds);
+     findPubliclyAccessibleResources.mockResolvedValue([]);
+
+     await getListAgentsHandler(mockReq, mockRes);
+
+     const response = mockRes.json.mock.calls[0][0];
+     expect(response.data).toHaveLength(5);
+     expect(response.has_more).toBe(true);
+     expect(response.after).toBeTruthy();
+   });
+
+   test('should mark publicly accessible agents', async () => {
+     // User has access to agents, some are public
+     mockReq.user.id = userB.toString();
+     findAccessibleResources.mockResolvedValue([agentA1._id, agentA2._id]);
+     findPubliclyAccessibleResources.mockResolvedValue([agentA2._id]);
+
+     await getListAgentsHandler(mockReq, mockRes);
+
+     const response = mockRes.json.mock.calls[0][0];
+     expect(response.data).toHaveLength(2);
+
+     const publicAgent = response.data.find((a) => a.id === agentA2.id);
+     const privateAgent = response.data.find((a) => a.id === agentA1.id);
+
+     expect(publicAgent.isPublic).toBe(true);
+     expect(privateAgent.isPublic).toBeUndefined();
+   });
+
+   test('should handle requiredPermission parameter', async () => {
+     // Test with different permission levels
+     mockReq.user.id = userB.toString();
+     mockReq.query.requiredPermission = '15'; // FULL_ACCESS
+     findAccessibleResources.mockResolvedValue([agentA1._id]);
+     findPubliclyAccessibleResources.mockResolvedValue([]);
+
+     await getListAgentsHandler(mockReq, mockRes);
+
+     expect(findAccessibleResources).toHaveBeenCalledWith({
+       userId: userB.toString(),
+       role: 'USER',
+       resourceType: 'agent',
+       requiredPermissions: 15,
+     });
+
+     const response = mockRes.json.mock.calls[0][0];
+     expect(response.data).toHaveLength(1);
+   });
+
+   test('should handle promoted filter with ACL', async () => {
+     // Create a promoted agent
+     const promotedAgent = await Agent.create({
+       id: `agent_${nanoid(12)}`,
+       name: 'Promoted Agent',
+       description: 'A promoted agent',
+       provider: 'openai',
+       model: 'gpt-4',
+       author: userA,
+       is_promoted: true,
+       versions: [
+         {
+           name: 'Promoted Agent',
+           description: 'A promoted agent',
+           provider: 'openai',
+           model: 'gpt-4',
+           is_promoted: true,
+           createdAt: new Date(),
+           updatedAt: new Date(),
+         },
+       ],
+     });
+
+     mockReq.user.id = userB.toString();
+     mockReq.query.promoted = '1';
+     findAccessibleResources.mockResolvedValue([agentA1._id, agentA2._id, promotedAgent._id]);
+     findPubliclyAccessibleResources.mockResolvedValue([]);
+
+     await getListAgentsHandler(mockReq, mockRes);
+
+     const response = mockRes.json.mock.calls[0][0];
+     expect(response.data).toHaveLength(1);
+     expect(response.data[0].id).toBe(promotedAgent.id);
+     expect(response.data[0].is_promoted).toBe(true);
+   });
+
+   test('should handle errors gracefully', async () => {
+     mockReq.user.id = userB.toString();
+     findAccessibleResources.mockRejectedValue(new Error('Permission service error'));
+
+     await getListAgentsHandler(mockReq, mockRes);
+
+     expect(mockRes.status).toHaveBeenCalledWith(500);
+     expect(mockRes.json).toHaveBeenCalledWith({
+       error: 'Permission service error',
+     });
+   });
+
+   test('should respect combined filters with ACL', async () => {
+     // Create agents with specific attributes
+     const productivityPromoted = await Agent.create({
+       id: `agent_${nanoid(12)}`,
+       name: 'Productivity Pro',
+       description: 'A promoted productivity agent',
+       provider: 'openai',
+       model: 'gpt-4',
+       author: userA,
+       category: 'productivity',
+       is_promoted: true,
+       versions: [
+         {
+           name: 'Productivity Pro',
+           description: 'A promoted productivity agent',
+           provider: 'openai',
+           model: 'gpt-4',
+           category: 'productivity',
+           is_promoted: true,
+           createdAt: new Date(),
+           updatedAt: new Date(),
+         },
+       ],
+     });
+
+     mockReq.user.id = userB.toString();
+     mockReq.query.category = 'productivity';
+     mockReq.query.promoted = '1';
+     findAccessibleResources.mockResolvedValue([
+       agentA1._id,
+       agentA2._id,
+       agentA3._id,
+       productivityPromoted._id,
+     ]);
+     findPubliclyAccessibleResources.mockResolvedValue([]);
+
+     await getListAgentsHandler(mockReq, mockRes);
+
+     const response = mockRes.json.mock.calls[0][0];
+     expect(response.data).toHaveLength(1);
+     expect(response.data[0].id).toBe(productivityPromoted.id);
+     expect(response.data[0].category).toBe('productivity');
+     expect(response.data[0].is_promoted).toBe(true);
+   });
+ });
  });
@@ -152,7 +152,7 @@ const chatV1 = async (req, res) => {
      return res.end();
    }
    await cache.delete(cacheKey);
-   const cancelledRun = await openai.beta.threads.runs.cancel(thread_id, run_id);
+   const cancelledRun = await openai.beta.threads.runs.cancel(run_id, { thread_id });
    logger.debug('[/assistants/chat/] Cancelled run:', cancelledRun);
  } catch (error) {
    logger.error('[/assistants/chat/] Error cancelling run', error);

@@ -162,7 +162,7 @@ const chatV1 = async (req, res) => {

  let run;
  try {
-   run = await openai.beta.threads.runs.retrieve(thread_id, run_id);
+   run = await openai.beta.threads.runs.retrieve(run_id, { thread_id });
    await recordUsage({
      ...run.usage,
      model: run.model,

@@ -623,7 +623,7 @@ const chatV1 = async (req, res) => {

    if (!response.run.usage) {
      await sleep(3000);
-     completedRun = await openai.beta.threads.runs.retrieve(thread_id, response.run.id);
+     completedRun = await openai.beta.threads.runs.retrieve(response.run.id, { thread_id });
      if (completedRun.usage) {
        await recordUsage({
          ...completedRun.usage,
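
These call-site updates all follow one shape: the run ID becomes the first positional argument and the thread moves into a params object. That matches the signature change for thread-scoped resources in newer openai Node SDK releases (stated here as the apparent motivation; the changeset itself does not name the SDK version it targets):

// Before (older SDK style): thread first, run second
// const run = await openai.beta.threads.runs.retrieve(thread_id, run_id);

// After (newer SDK style): run first, thread in the params object
// const run = await openai.beta.threads.runs.retrieve(run_id, { thread_id });

The same pattern repeats below for `runs.cancel`, and `assistants.del` is renamed to `assistants.delete` for the same reason.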
@@ -467,7 +467,7 @@ const chatV2 = async (req, res) => {

    if (!response.run.usage) {
      await sleep(3000);
-     completedRun = await openai.beta.threads.runs.retrieve(thread_id, response.run.id);
+     completedRun = await openai.beta.threads.runs.retrieve(response.run.id, { thread_id });
      if (completedRun.usage) {
        await recordUsage({
          ...completedRun.usage,

@@ -108,7 +108,7 @@ const createErrorHandler = ({ req, res, getContext, originPath = '/assistants/ch
      return res.end();
    }
    await cache.delete(cacheKey);
-   const cancelledRun = await openai.beta.threads.runs.cancel(thread_id, run_id);
+   const cancelledRun = await openai.beta.threads.runs.cancel(run_id, { thread_id });
    logger.debug(`[${originPath}] Cancelled run:`, cancelledRun);
  } catch (error) {
    logger.error(`[${originPath}] Error cancelling run`, error);

@@ -118,7 +118,7 @@ const createErrorHandler = ({ req, res, getContext, originPath = '/assistants/ch

  let run;
  try {
-   run = await openai.beta.threads.runs.retrieve(thread_id, run_id);
+   run = await openai.beta.threads.runs.retrieve(run_id, { thread_id });
    await recordUsage({
      ...run.usage,
      model: run.model,

@@ -173,6 +173,16 @@ const listAssistantsForAzure = async ({ req, res, version, azureConfig = {}, que
    };
  };

+ /**
+  * Initializes the OpenAI client.
+  * @param {object} params - The parameters object.
+  * @param {ServerRequest} params.req - The request object.
+  * @param {ServerResponse} params.res - The response object.
+  * @param {TEndpointOption} params.endpointOption - The endpoint options.
+  * @param {boolean} params.initAppClient - Whether to initialize the app client.
+  * @param {string} params.overrideEndpoint - The endpoint to override.
+  * @returns {Promise<{ openai: OpenAIClient, openAIApiKey: string; client: import('~/app/clients/OpenAIClient') }>} - The initialized OpenAI client.
+  */
  async function getOpenAIClient({ req, res, endpointOption, initAppClient, overrideEndpoint }) {
    let endpoint = overrideEndpoint ?? req.body.endpoint ?? req.query.endpoint;
    const version = await getCurrentVersion(req, endpoint);

@@ -197,7 +197,7 @@ const deleteAssistant = async (req, res) => {
    await validateAuthor({ req, openai });

    const assistant_id = req.params.id;
-   const deletionStatus = await openai.beta.assistants.del(assistant_id);
+   const deletionStatus = await openai.beta.assistants.delete(assistant_id);
    if (deletionStatus?.deleted) {
      await deleteAssistantActions({ req, assistant_id });
    }

@@ -365,7 +365,7 @@ const uploadAssistantAvatar = async (req, res) => {
    try {
      await fs.unlink(req.file.path);
      logger.debug('[/:agent_id/avatar] Temp. image upload file deleted');
-   } catch (error) {
+   } catch {
      logger.debug('[/:agent_id/avatar] Temp. image upload file already deleted');
    }
  }

@@ -8,15 +8,13 @@ const express = require('express');
  const passport = require('passport');
  const compression = require('compression');
  const cookieParser = require('cookie-parser');
- const { isEnabled } = require('@librechat/api');
  const { logger } = require('@librechat/data-schemas');
  const mongoSanitize = require('express-mongo-sanitize');
+ const { isEnabled, ErrorController } = require('@librechat/api');
  const { connectDb, indexSync } = require('~/db');

  const validateImageRequest = require('./middleware/validateImageRequest');
  const { jwtLogin, ldapLogin, passportLogin } = require('~/strategies');
- const errorController = require('./controllers/ErrorController');
- const initializeMCP = require('./services/initializeMCP');
+ const initializeMCPs = require('./services/initializeMCPs');
  const configureSocialLogins = require('./socialLogins');
  const AppService = require('./services/AppService');
  const staticCache = require('./utils/staticCache');

@@ -122,8 +120,7 @@ const startServer = async () => {
    app.use('/api/tags', routes.tags);
    app.use('/api/mcp', routes.mcp);

-   // Add the error controller one more time after all routes
-   app.use(errorController);
+   app.use(ErrorController);

    app.use((req, res) => {
      res.set({

@@ -148,7 +145,7 @@ const startServer = async () => {
      logger.info(`Server listening at http://${host == '0.0.0.0' ? 'localhost' : host}:${port}`);
    }

-   initializeMCP(app);
+   initializeMCPs(app);
  });
};

@@ -92,7 +92,7 @@ async function healthCheckPoll(app, retries = 0) {
    if (response.status === 200) {
      return; // App is healthy
    }
- } catch (error) {
+ } catch {
    // Ignore connection errors during polling
  }

@@ -47,7 +47,7 @@ async function abortRun(req, res) {

  try {
    await cache.set(cacheKey, 'cancelled', three_minutes);
-   const cancelledRun = await openai.beta.threads.runs.cancel(thread_id, run_id);
+   const cancelledRun = await openai.beta.threads.runs.cancel(run_id, { thread_id });
    logger.debug('[abortRun] Cancelled run:', cancelledRun);
  } catch (error) {
    logger.error('[abortRun] Error cancelling run', error);

@@ -60,7 +60,7 @@ async function abortRun(req, res) {
  }

  try {
-   const run = await openai.beta.threads.runs.retrieve(thread_id, run_id);
+   const run = await openai.beta.threads.runs.retrieve(run_id, { thread_id });
    await recordUsage({
      ...run.usage,
      model: run.model,

@@ -1,5 +1,5 @@
  const { logger } = require('@librechat/data-schemas');
- const { Constants, isAgentsEndpoint } = require('librechat-data-provider');
+ const { Constants, isAgentsEndpoint, ResourceType } = require('librechat-data-provider');
  const { canAccessResource } = require('./canAccessResource');
  const { getAgent } = require('~/models/Agent');

@@ -67,7 +67,7 @@ const canAccessAgentFromBody = (options) => {
  }

  const agentAccessMiddleware = canAccessResource({
-   resourceType: 'agent',
+   resourceType: ResourceType.AGENT,
    requiredPermission,
    resourceIdParam: 'agent_id', // This will be ignored since we use custom resolver
    idResolver: () => resolveAgentIdFromBody(agentId),

@@ -1,5 +1,6 @@
- const { getAgent } = require('~/models/Agent');
+ const { ResourceType } = require('librechat-data-provider');
  const { canAccessResource } = require('./canAccessResource');
+ const { getAgent } = require('~/models/Agent');

  /**
   * Agent ID resolver function

@@ -46,7 +47,7 @@ const canAccessAgentResource = (options) => {
  }

  return canAccessResource({
-   resourceType: 'agent',
+   resourceType: ResourceType.AGENT,
    requiredPermission,
    resourceIdParam,
    idResolver: resolveAgentId,

@@ -1,4 +1,5 @@
  const mongoose = require('mongoose');
+ const { ResourceType, PrincipalType, PrincipalModel } = require('librechat-data-provider');
  const { MongoMemoryServer } = require('mongodb-memory-server');
  const { canAccessAgentResource } = require('./canAccessAgentResource');
  const { User, Role, AclEntry } = require('~/db/models');

@@ -42,7 +43,7 @@ describe('canAccessAgentResource middleware', () => {
  });

  req = {
-   user: { id: testUser._id.toString(), role: 'test-role' },
+   user: { id: testUser._id, role: testUser.role },
    params: {},
  };
  res = {

@@ -96,10 +97,10 @@ describe('canAccessAgentResource middleware', () => {

    // Create ACL entry for the author (owner permissions)
    await AclEntry.create({
-     principalType: 'user',
+     principalType: PrincipalType.USER,
      principalId: testUser._id,
-     principalModel: 'User',
-     resourceType: 'agent',
+     principalModel: PrincipalModel.USER,
+     resourceType: ResourceType.AGENT,
      resourceId: agent._id,
      permBits: 15, // All permissions (1+2+4+8)
      grantedBy: testUser._id,

@@ -133,10 +134,10 @@ describe('canAccessAgentResource middleware', () => {

    // Create ACL entry for the other user (owner)
    await AclEntry.create({
-     principalType: 'user',
+     principalType: PrincipalType.USER,
      principalId: otherUser._id,
-     principalModel: 'User',
-     resourceType: 'agent',
+     principalModel: PrincipalModel.USER,
+     resourceType: ResourceType.AGENT,
      resourceId: agent._id,
      permBits: 15, // All permissions
      grantedBy: otherUser._id,

@@ -174,10 +175,10 @@ describe('canAccessAgentResource middleware', () => {

    // Create ACL entry granting view permission to test user
    await AclEntry.create({
-     principalType: 'user',
+     principalType: PrincipalType.USER,
      principalId: testUser._id,
-     principalModel: 'User',
-     resourceType: 'agent',
+     principalModel: PrincipalModel.USER,
+     resourceType: ResourceType.AGENT,
      resourceId: agent._id,
      permBits: 1, // VIEW permission
      grantedBy: otherUser._id,

@@ -211,10 +212,10 @@ describe('canAccessAgentResource middleware', () => {

    // Create ACL entry granting only view permission
    await AclEntry.create({
-     principalType: 'user',
+     principalType: PrincipalType.USER,
      principalId: testUser._id,
-     principalModel: 'User',
-     resourceType: 'agent',
+     principalModel: PrincipalModel.USER,
+     resourceType: ResourceType.AGENT,
      resourceId: agent._id,
      permBits: 1, // VIEW permission only
      grantedBy: otherUser._id,

@@ -258,10 +259,10 @@ describe('canAccessAgentResource middleware', () => {

    // Create ACL entry for the author
    await AclEntry.create({
-     principalType: 'user',
+     principalType: PrincipalType.USER,
      principalId: testUser._id,
-     principalModel: 'User',
-     resourceType: 'agent',
+     principalModel: PrincipalModel.USER,
+     resourceType: ResourceType.AGENT,
      resourceId: agent._id,
      permBits: 15, // All permissions
      grantedBy: testUser._id,

@@ -294,10 +295,10 @@ describe('canAccessAgentResource middleware', () => {

    // Create ACL entry with all permissions for the owner
    await AclEntry.create({
-     principalType: 'user',
+     principalType: PrincipalType.USER,
      principalId: testUser._id,
-     principalModel: 'User',
-     resourceType: 'agent',
+     principalModel: PrincipalModel.USER,
+     resourceType: ResourceType.AGENT,
      resourceId: agent._id,
      permBits: 15, // All permissions (1+2+4+8)
      grantedBy: testUser._id,

@@ -354,10 +355,10 @@ describe('canAccessAgentResource middleware', () => {

    // Create ACL entry for the author
    await AclEntry.create({
-     principalType: 'user',
+     principalType: PrincipalType.USER,
      principalId: testUser._id,
-     principalModel: 'User',
-     resourceType: 'agent',
+     principalModel: PrincipalModel.USER,
+     resourceType: ResourceType.AGENT,
      resourceId: agent._id,
      permBits: 15, // All permissions
      grantedBy: testUser._id,
@@ -0,0 +1,61 @@
const { ResourceType } = require('librechat-data-provider');
const { canAccessResource } = require('./canAccessResource');
const { getPromptGroup } = require('~/models/Prompt');

/**
 * PromptGroup ID resolver function
 * Resolves promptGroup ID to MongoDB ObjectId
 *
 * @param {string} groupId - PromptGroup ID from route parameter
 * @returns {Promise<Object|null>} PromptGroup document with _id field, or null if not found
 */
const resolvePromptGroupId = async (groupId) => {
  return await getPromptGroup({ _id: groupId });
};

/**
 * PromptGroup-specific middleware factory that creates middleware to check promptGroup access permissions.
 * This middleware extends the generic canAccessResource to handle promptGroup ID resolution.
 *
 * @param {Object} options - Configuration options
 * @param {number} options.requiredPermission - The permission bit required (1=view, 2=edit, 4=delete, 8=share)
 * @param {string} [options.resourceIdParam='groupId'] - The name of the route parameter containing the promptGroup ID
 * @returns {Function} Express middleware function
 *
 * @example
 * // Basic usage for viewing promptGroups
 * router.get('/prompts/groups/:groupId',
 *   canAccessPromptGroupResource({ requiredPermission: 1 }),
 *   getPromptGroup
 * );
 *
 * @example
 * // Custom resource ID parameter and edit permission
 * router.patch('/prompts/groups/:id',
 *   canAccessPromptGroupResource({
 *     requiredPermission: 2,
 *     resourceIdParam: 'id'
 *   }),
 *   updatePromptGroup
 * );
 */
const canAccessPromptGroupResource = (options) => {
  const { requiredPermission, resourceIdParam = 'groupId' } = options;

  if (!requiredPermission || typeof requiredPermission !== 'number') {
    throw new Error(
      'canAccessPromptGroupResource: requiredPermission is required and must be a number',
    );
  }

  return canAccessResource({
    resourceType: ResourceType.PROMPTGROUP,
    requiredPermission,
    resourceIdParam,
    idResolver: resolvePromptGroupId,
  });
};

module.exports = {
  canAccessPromptGroupResource,
};
@@ -0,0 +1,55 @@
const { ResourceType } = require('librechat-data-provider');
const { canAccessResource } = require('./canAccessResource');
const { getPrompt } = require('~/models/Prompt');

/**
 * Prompt to PromptGroup ID resolver function
 * Resolves prompt ID to its parent promptGroup ID
 *
 * @param {string} promptId - Prompt ID from route parameter
 * @returns {Promise<Object|null>} Object with promptGroup's _id field, or null if not found
 */
const resolvePromptToGroupId = async (promptId) => {
  const prompt = await getPrompt({ _id: promptId });
  if (!prompt || !prompt.groupId) {
    return null;
  }
  // Return an object with _id that matches the promptGroup ID
  return { _id: prompt.groupId };
};

/**
 * Middleware factory that checks promptGroup permissions when accessing individual prompts.
 * This allows permission management at the promptGroup level while still supporting
 * individual prompt access patterns.
 *
 * @param {Object} options - Configuration options
 * @param {number} options.requiredPermission - The permission bit required (1=view, 2=edit, 4=delete, 8=share)
 * @param {string} [options.resourceIdParam='promptId'] - The name of the route parameter containing the prompt ID
 * @returns {Function} Express middleware function
 *
 * @example
 * // Check promptGroup permissions when viewing a prompt
 * router.get('/prompts/:promptId',
 *   canAccessPromptViaGroup({ requiredPermission: 1 }),
 *   getPrompt
 * );
 */
const canAccessPromptViaGroup = (options) => {
  const { requiredPermission, resourceIdParam = 'promptId' } = options;

  if (!requiredPermission || typeof requiredPermission !== 'number') {
    throw new Error('canAccessPromptViaGroup: requiredPermission is required and must be a number');
  }

  return canAccessResource({
    resourceType: ResourceType.PROMPTGROUP,
    requiredPermission,
    resourceIdParam,
    idResolver: resolvePromptToGroupId,
  });
};

module.exports = {
  canAccessPromptViaGroup,
};
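
Both prompt middlewares delegate to the same generic factory. A condensed sketch of the `canAccessResource` pattern they rely on (simplified and hypothetical in its details; the real middleware also validates IDs and shapes its error responses differently):

// Pared-down shape of the generic access-control factory:
const makeAccessMiddleware = ({ resourceType, requiredPermission, resourceIdParam, idResolver }) => {
  return async (req, res, next) => {
    // 1. Resolve the route parameter to a canonical document with an _id.
    const resource = await idResolver(req.params[resourceIdParam]);
    if (!resource) {
      return res.status(404).json({ error: 'Resource not found' });
    }
    // 2. Check the caller's effective permission bits against the requirement.
    const allowed = await checkPermission({
      userId: req.user.id,
      role: req.user.role,
      resourceType,
      resourceId: resource._id,
      requiredPermission,
    });
    return allowed ? next() : res.status(403).json({ error: 'Forbidden' });
  };
};

The `idResolver` indirection is what lets prompt routes enforce permissions at the promptGroup level: the resolver maps a prompt ID to its parent group before the ACL lookup runs, as the hunk below shows for the real `checkPermission` call.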

@@ -111,6 +111,7 @@ const canAccessResource = (options) => {
    // Check permissions using PermissionService with ObjectId
    const hasPermission = await checkPermission({
      userId,
+     role: req.user.role,
      resourceType,
      resourceId,
      requiredPermission,

api/server/middleware/accessResources/fileAccess.js (new file, 125 lines)

@@ -0,0 +1,125 @@
const { logger } = require('@librechat/data-schemas');
const { PermissionBits, hasPermissions, ResourceType } = require('librechat-data-provider');
const { getEffectivePermissions } = require('~/server/services/PermissionService');
const { getAgent } = require('~/models/Agent');
const { getFiles } = require('~/models/File');

/**
 * Checks if user has access to a file through agent permissions
 * Files inherit permissions from agents - if you can view the agent, you can access its files
 */
const checkAgentBasedFileAccess = async ({ userId, role, fileId }) => {
  try {
    // Find agents that have this file in their tool_resources
    const agentsWithFile = await getAgent({
      $or: [
        { 'tool_resources.file_search.file_ids': fileId },
        { 'tool_resources.execute_code.file_ids': fileId },
        { 'tool_resources.ocr.file_ids': fileId },
      ],
    });

    if (!agentsWithFile || agentsWithFile.length === 0) {
      return false;
    }

    // Check if user has access to any of these agents
    for (const agent of Array.isArray(agentsWithFile) ? agentsWithFile : [agentsWithFile]) {
      // Check if user is the agent author
      if (agent.author && agent.author.toString() === userId) {
        logger.debug(`[fileAccess] User is author of agent ${agent.id}`);
        return true;
      }

      // Check ACL permissions for VIEW access on the agent
      try {
        const permissions = await getEffectivePermissions({
          userId,
          role,
          resourceType: ResourceType.AGENT,
          resourceId: agent._id || agent.id,
        });

        if (hasPermissions(permissions, PermissionBits.VIEW)) {
          logger.debug(`[fileAccess] User ${userId} has VIEW permissions on agent ${agent.id}`);
          return true;
        }
      } catch (permissionError) {
        logger.warn(
          `[fileAccess] Permission check failed for agent ${agent.id}:`,
          permissionError.message,
        );
        // Continue checking other agents
      }
    }

    return false;
  } catch (error) {
    logger.error('[fileAccess] Error checking agent-based access:', error);
    return false;
  }
};

/**
 * Middleware to check if user can access a file
 * Checks: 1) File ownership, 2) Agent-based access (file inherits agent permissions)
 */
const fileAccess = async (req, res, next) => {
  try {
    const fileId = req.params.file_id;
    const userId = req.user?.id;
    const userRole = req.user?.role;
    if (!fileId) {
      return res.status(400).json({
        error: 'Bad Request',
        message: 'file_id is required',
      });
    }

    if (!userId) {
      return res.status(401).json({
        error: 'Unauthorized',
        message: 'Authentication required',
      });
    }

    // Get the file
    const [file] = await getFiles({ file_id: fileId });
    if (!file) {
      return res.status(404).json({
        error: 'Not Found',
        message: 'File not found',
      });
    }

    // Check if user owns the file
    if (file.user && file.user.toString() === userId) {
      req.fileAccess = { file };
      return next();
    }

    // Check agent-based access (file inherits agent permissions)
    const hasAgentAccess = await checkAgentBasedFileAccess({ userId, role: userRole, fileId });
    if (hasAgentAccess) {
      req.fileAccess = { file };
      return next();
    }

    // No access
    logger.warn(`[fileAccess] User ${userId} denied access to file ${fileId}`);
    return res.status(403).json({
      error: 'Forbidden',
      message: 'Insufficient permissions to access this file',
    });
  } catch (error) {
    logger.error('[fileAccess] Error checking file access:', error);
    return res.status(500).json({
      error: 'Internal Server Error',
      message: 'Failed to check file access permissions',
    });
  }
};

module.exports = {
  fileAccess,
};
|
||||
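On success the middleware stashes the loaded document on req.fileAccess, so downstream handlers can reuse it instead of re-querying; the download route later in this diff does exactly that. A minimal sketch with an illustrative route path:

// Sketch: consuming req.fileAccess populated by the middleware above.
router.get('/files/:file_id/meta', fileAccess, (req, res) => {
  const { file } = req.fileAccess; // set by fileAccess before next() is called
  res.json({ file_id: file.file_id, filename: file.filename, source: file.source });
});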
@@ -1,9 +1,13 @@
const { canAccessResource } = require('./canAccessResource');
const { canAccessAgentResource } = require('./canAccessAgentResource');
const { canAccessAgentFromBody } = require('./canAccessAgentFromBody');
+const { canAccessPromptViaGroup } = require('./canAccessPromptViaGroup');
+const { canAccessPromptGroupResource } = require('./canAccessPromptGroupResource');

module.exports = {
  canAccessResource,
  canAccessAgentResource,
  canAccessAgentFromBody,
+  canAccessPromptViaGroup,
+  canAccessPromptGroupResource,
};
@@ -1,4 +1,4 @@
-const { PermissionTypes, Permissions } = require('librechat-data-provider');
+const { PrincipalType, PermissionTypes, Permissions } = require('librechat-data-provider');
const { getRoleByName } = require('~/models/Role');
const { logger } = require('~/config');

@@ -7,7 +7,8 @@ const { logger } = require('~/config');
 * Checks specific permission based on the 'type' query parameter:
 * - type=user: requires VIEW_USERS permission
 * - type=group: requires VIEW_GROUPS permission
-* - no type (mixed search): requires either VIEW_USERS OR VIEW_GROUPS
+* - type=role: requires VIEW_ROLES permission
+* - no type (mixed search): requires either VIEW_USERS OR VIEW_GROUPS OR VIEW_ROLES
 */
const checkPeoplePickerAccess = async (req, res, next) => {
  try {
@@ -31,29 +32,38 @@ const checkPeoplePickerAccess = async (req, res, next) => {
    const peoplePickerPerms = role.permissions[PermissionTypes.PEOPLE_PICKER] || {};
    const canViewUsers = peoplePickerPerms[Permissions.VIEW_USERS] === true;
    const canViewGroups = peoplePickerPerms[Permissions.VIEW_GROUPS] === true;
+   const canViewRoles = peoplePickerPerms[Permissions.VIEW_ROLES] === true;

-   if (type === 'user') {
-     if (!canViewUsers) {
-       return res.status(403).json({
-         error: 'Forbidden',
-         message: 'Insufficient permissions to search for users',
-       });
-     }
-   } else if (type === 'group') {
-     if (!canViewGroups) {
-       return res.status(403).json({
-         error: 'Forbidden',
-         message: 'Insufficient permissions to search for groups',
-       });
-     }
-   } else {
-     if (!canViewUsers || !canViewGroups) {
-       return res.status(403).json({
-         error: 'Forbidden',
-         message: 'Insufficient permissions to search for both users and groups',
-       });
-     }
-   }
+   const permissionChecks = {
+     [PrincipalType.USER]: {
+       hasPermission: canViewUsers,
+       message: 'Insufficient permissions to search for users',
+     },
+     [PrincipalType.GROUP]: {
+       hasPermission: canViewGroups,
+       message: 'Insufficient permissions to search for groups',
+     },
+     [PrincipalType.ROLE]: {
+       hasPermission: canViewRoles,
+       message: 'Insufficient permissions to search for roles',
+     },
+   };
+
+   const check = permissionChecks[type];
+   if (check && !check.hasPermission) {
+     return res.status(403).json({
+       error: 'Forbidden',
+       message: check.message,
+     });
+   }
+
+   if (!type && !canViewUsers && !canViewGroups && !canViewRoles) {
+     return res.status(403).json({
+       error: 'Forbidden',
+       message: 'Insufficient permissions to search for users, groups, or roles',
+     });
+   }

    next();
  } catch (error) {
    logger.error(
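Keying the checks by PrincipalType makes the 403 handling table-driven: a new searchable principal becomes one map entry instead of another else-if branch. A hedged sketch of such an extension; PrincipalType.AGENT and Permissions.VIEW_AGENTS are hypothetical and do not exist in this PR:

// Hypothetical extension; neither identifier below exists in this diff.
permissionChecks[PrincipalType.AGENT] = {
  hasPermission: peoplePickerPerms[Permissions.VIEW_AGENTS] === true,
  message: 'Insufficient permissions to search for agents',
};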
api/server/middleware/checkPeoplePickerAccess.spec.js (new file, 250 lines)
@@ -0,0 +1,250 @@
const { PrincipalType, PermissionTypes, Permissions } = require('librechat-data-provider');
const { checkPeoplePickerAccess } = require('./checkPeoplePickerAccess');
const { getRoleByName } = require('~/models/Role');
const { logger } = require('~/config');

jest.mock('~/models/Role');
jest.mock('~/config', () => ({
  logger: {
    error: jest.fn(),
  },
}));

describe('checkPeoplePickerAccess', () => {
  let req, res, next;

  beforeEach(() => {
    req = {
      user: { id: 'user123', role: 'USER' },
      query: {},
    };
    res = {
      status: jest.fn().mockReturnThis(),
      json: jest.fn(),
    };
    next = jest.fn();
    jest.clearAllMocks();
  });

  it('should return 401 if user is not authenticated', async () => {
    req.user = null;

    await checkPeoplePickerAccess(req, res, next);

    expect(res.status).toHaveBeenCalledWith(401);
    expect(res.json).toHaveBeenCalledWith({
      error: 'Unauthorized',
      message: 'Authentication required',
    });
    expect(next).not.toHaveBeenCalled();
  });

  it('should return 403 if role has no permissions', async () => {
    getRoleByName.mockResolvedValue(null);

    await checkPeoplePickerAccess(req, res, next);

    expect(res.status).toHaveBeenCalledWith(403);
    expect(res.json).toHaveBeenCalledWith({
      error: 'Forbidden',
      message: 'No permissions configured for user role',
    });
    expect(next).not.toHaveBeenCalled();
  });

  it('should allow access when searching for users with VIEW_USERS permission', async () => {
    req.query.type = PrincipalType.USER;
    getRoleByName.mockResolvedValue({
      permissions: {
        [PermissionTypes.PEOPLE_PICKER]: {
          [Permissions.VIEW_USERS]: true,
          [Permissions.VIEW_GROUPS]: false,
          [Permissions.VIEW_ROLES]: false,
        },
      },
    });

    await checkPeoplePickerAccess(req, res, next);

    expect(next).toHaveBeenCalled();
    expect(res.status).not.toHaveBeenCalled();
  });

  it('should deny access when searching for users without VIEW_USERS permission', async () => {
    req.query.type = PrincipalType.USER;
    getRoleByName.mockResolvedValue({
      permissions: {
        [PermissionTypes.PEOPLE_PICKER]: {
          [Permissions.VIEW_USERS]: false,
          [Permissions.VIEW_GROUPS]: true,
          [Permissions.VIEW_ROLES]: true,
        },
      },
    });

    await checkPeoplePickerAccess(req, res, next);

    expect(res.status).toHaveBeenCalledWith(403);
    expect(res.json).toHaveBeenCalledWith({
      error: 'Forbidden',
      message: 'Insufficient permissions to search for users',
    });
    expect(next).not.toHaveBeenCalled();
  });

  it('should allow access when searching for groups with VIEW_GROUPS permission', async () => {
    req.query.type = PrincipalType.GROUP;
    getRoleByName.mockResolvedValue({
      permissions: {
        [PermissionTypes.PEOPLE_PICKER]: {
          [Permissions.VIEW_USERS]: false,
          [Permissions.VIEW_GROUPS]: true,
          [Permissions.VIEW_ROLES]: false,
        },
      },
    });

    await checkPeoplePickerAccess(req, res, next);

    expect(next).toHaveBeenCalled();
    expect(res.status).not.toHaveBeenCalled();
  });

  it('should deny access when searching for groups without VIEW_GROUPS permission', async () => {
    req.query.type = PrincipalType.GROUP;
    getRoleByName.mockResolvedValue({
      permissions: {
        [PermissionTypes.PEOPLE_PICKER]: {
          [Permissions.VIEW_USERS]: true,
          [Permissions.VIEW_GROUPS]: false,
          [Permissions.VIEW_ROLES]: true,
        },
      },
    });

    await checkPeoplePickerAccess(req, res, next);

    expect(res.status).toHaveBeenCalledWith(403);
    expect(res.json).toHaveBeenCalledWith({
      error: 'Forbidden',
      message: 'Insufficient permissions to search for groups',
    });
    expect(next).not.toHaveBeenCalled();
  });

  it('should allow access when searching for roles with VIEW_ROLES permission', async () => {
    req.query.type = PrincipalType.ROLE;
    getRoleByName.mockResolvedValue({
      permissions: {
        [PermissionTypes.PEOPLE_PICKER]: {
          [Permissions.VIEW_USERS]: false,
          [Permissions.VIEW_GROUPS]: false,
          [Permissions.VIEW_ROLES]: true,
        },
      },
    });

    await checkPeoplePickerAccess(req, res, next);

    expect(next).toHaveBeenCalled();
    expect(res.status).not.toHaveBeenCalled();
  });

  it('should deny access when searching for roles without VIEW_ROLES permission', async () => {
    req.query.type = PrincipalType.ROLE;
    getRoleByName.mockResolvedValue({
      permissions: {
        [PermissionTypes.PEOPLE_PICKER]: {
          [Permissions.VIEW_USERS]: true,
          [Permissions.VIEW_GROUPS]: true,
          [Permissions.VIEW_ROLES]: false,
        },
      },
    });

    await checkPeoplePickerAccess(req, res, next);

    expect(res.status).toHaveBeenCalledWith(403);
    expect(res.json).toHaveBeenCalledWith({
      error: 'Forbidden',
      message: 'Insufficient permissions to search for roles',
    });
    expect(next).not.toHaveBeenCalled();
  });

  it('should allow mixed search when user has at least one permission', async () => {
    // No type specified = mixed search
    req.query.type = undefined;
    getRoleByName.mockResolvedValue({
      permissions: {
        [PermissionTypes.PEOPLE_PICKER]: {
          [Permissions.VIEW_USERS]: false,
          [Permissions.VIEW_GROUPS]: false,
          [Permissions.VIEW_ROLES]: true,
        },
      },
    });

    await checkPeoplePickerAccess(req, res, next);

    expect(next).toHaveBeenCalled();
    expect(res.status).not.toHaveBeenCalled();
  });

  it('should deny mixed search when user has no permissions', async () => {
    // No type specified = mixed search
    req.query.type = undefined;
    getRoleByName.mockResolvedValue({
      permissions: {
        [PermissionTypes.PEOPLE_PICKER]: {
          [Permissions.VIEW_USERS]: false,
          [Permissions.VIEW_GROUPS]: false,
          [Permissions.VIEW_ROLES]: false,
        },
      },
    });

    await checkPeoplePickerAccess(req, res, next);

    expect(res.status).toHaveBeenCalledWith(403);
    expect(res.json).toHaveBeenCalledWith({
      error: 'Forbidden',
      message: 'Insufficient permissions to search for users, groups, or roles',
    });
    expect(next).not.toHaveBeenCalled();
  });

  it('should handle errors gracefully', async () => {
    const error = new Error('Database error');
    getRoleByName.mockRejectedValue(error);

    await checkPeoplePickerAccess(req, res, next);

    expect(logger.error).toHaveBeenCalledWith(
      '[checkPeoplePickerAccess][user123] checkPeoplePickerAccess error for req.query.type = undefined',
      error,
    );
    expect(res.status).toHaveBeenCalledWith(500);
    expect(res.json).toHaveBeenCalledWith({
      error: 'Internal Server Error',
      message: 'Failed to check permissions',
    });
    expect(next).not.toHaveBeenCalled();
  });

  it('should handle missing permissions object gracefully', async () => {
    req.query.type = PrincipalType.USER;
    getRoleByName.mockResolvedValue({
      permissions: {}, // No PEOPLE_PICKER permissions
    });

    await checkPeoplePickerAccess(req, res, next);

    expect(res.status).toHaveBeenCalledWith(403);
    expect(res.json).toHaveBeenCalledWith({
      error: 'Forbidden',
      message: 'Insufficient permissions to search for users',
    });
    expect(next).not.toHaveBeenCalled();
  });
});
@@ -33,7 +33,7 @@ const validateModel = async (req, res, next) => {
    return next();
  }

-  const { ILLEGAL_MODEL_REQ_SCORE: score = 5 } = process.env ?? {};
+  const { ILLEGAL_MODEL_REQ_SCORE: score = 1 } = process.env ?? {};

  const type = ViolationTypes.ILLEGAL_MODEL_REQUEST;
  const errorMessage = {
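One caveat with this pattern: process.env values are always strings, so the destructuring default only applies when the variable is entirely unset, and a configured value such as '5' arrives as the string '5'. A defensive read might coerce explicitly; this is a suggestion, not code from the diff:

// Suggested hardening (not in the diff): coerce the env string, fall back to 1.
const raw = process.env.ILLEGAL_MODEL_REQ_SCORE;
const parsed = Number(raw);
const score = raw != null && !Number.isNaN(parsed) ? parsed : 1;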
api/server/routes/__tests__/mcp.spec.js (new file, 1259 lines)
File diff suppressed because it is too large
@@ -1,5 +1,5 @@
const express = require('express');
-const { PermissionBits } = require('@librechat/data-schemas');
+const { ResourceType, PermissionBits } = require('librechat-data-provider');
const {
  getUserEffectivePermissions,
  updateResourcePermissions,
@@ -46,11 +46,33 @@ router.get('/:resourceType/:resourceId', getResourcePermissions);
 */
router.put(
  '/:resourceType/:resourceId',
-  canAccessResource({
-    resourceType: 'agent',
-    requiredPermission: PermissionBits.SHARE,
-    resourceIdParam: 'resourceId',
-  }),
+  // Use middleware that dynamically handles resource type and permissions
+  (req, res, next) => {
+    const { resourceType } = req.params;
+    let middleware;
+
+    if (resourceType === ResourceType.AGENT) {
+      middleware = canAccessResource({
+        resourceType: ResourceType.AGENT,
+        requiredPermission: PermissionBits.SHARE,
+        resourceIdParam: 'resourceId',
+      });
+    } else if (resourceType === ResourceType.PROMPTGROUP) {
+      middleware = canAccessResource({
+        resourceType: ResourceType.PROMPTGROUP,
+        requiredPermission: PermissionBits.SHARE,
+        resourceIdParam: 'resourceId',
+      });
+    } else {
+      return res.status(400).json({
+        error: 'Bad Request',
+        message: `Unsupported resource type: ${resourceType}`,
+      });
+    }
+
+    // Execute the middleware
+    middleware(req, res, next);
+  },
  updateResourcePermissions,
);
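Both branches differ only in the resourceType they forward, so the dispatcher could also be built once as a lookup table; a sketch of that alternative shape under the same imports (a possible refactor, not what the PR ships):

// Alternative sketch: table-driven dispatch over supported resource types.
const shareMiddlewareByType = {
  [ResourceType.AGENT]: canAccessResource({
    resourceType: ResourceType.AGENT,
    requiredPermission: PermissionBits.SHARE,
    resourceIdParam: 'resourceId',
  }),
  [ResourceType.PROMPTGROUP]: canAccessResource({
    resourceType: ResourceType.PROMPTGROUP,
    requiredPermission: PermissionBits.SHARE,
    resourceIdParam: 'resourceId',
  }),
};

router.put(
  '/:resourceType/:resourceId',
  (req, res, next) => {
    const middleware = shareMiddlewareByType[req.params.resourceType];
    if (!middleware) {
      return res.status(400).json({
        error: 'Bad Request',
        message: `Unsupported resource type: ${req.params.resourceType}`,
      });
    }
    return middleware(req, res, next);
  },
  updateResourcePermissions,
);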
@@ -1,21 +1,22 @@
const express = require('express');
const { nanoid } = require('nanoid');
const { logger } = require('@librechat/data-schemas');
const { generateCheckAccess } = require('@librechat/api');
const { logger, PermissionBits } = require('@librechat/data-schemas');
const {
  Permissions,
  ResourceType,
  PermissionTypes,
  actionDelimiter,
  PermissionBits,
  removeNullishValues,
} = require('librechat-data-provider');
const { encryptMetadata, domainParser } = require('~/server/services/ActionService');
const { findAccessibleResources } = require('~/server/services/PermissionService');
const { getAgent, updateAgent, getListAgentsByAccess } = require('~/models/Agent');
const { updateAction, getActions, deleteAction } = require('~/models/Action');
const { isActionDomainAllowed } = require('~/server/services/domains');
const { canAccessAgentResource } = require('~/server/middleware');
const { getAgent, updateAgent } = require('~/models/Agent');
const { getRoleByName } = require('~/models/Role');
const { getListAgentsByAccess } = require('~/models/Agent');
const router = express.Router();

@@ -36,7 +37,8 @@ router.get('/', async (req, res) => {
    const userId = req.user.id;
    const editableAgentObjectIds = await findAccessibleResources({
      userId,
-     resourceType: 'agent',
+     role: req.user.role,
+     resourceType: ResourceType.AGENT,
      requiredPermissions: PermissionBits.EDIT,
    });
@@ -1,7 +1,6 @@
const express = require('express');
-const { PermissionBits } = require('@librechat/data-schemas');
const { generateCheckAccess, skipAgentCheck } = require('@librechat/api');
-const { PermissionTypes, Permissions } = require('librechat-data-provider');
+const { PermissionTypes, Permissions, PermissionBits } = require('librechat-data-provider');
const {
  setHeaders,
  moderateText,
@@ -1,7 +1,6 @@
const express = require('express');
const { generateCheckAccess } = require('@librechat/api');
-const { PermissionBits } = require('@librechat/data-schemas');
-const { PermissionTypes, Permissions } = require('librechat-data-provider');
+const { PermissionTypes, Permissions, PermissionBits } = require('librechat-data-provider');
const { requireJwtAuth, canAccessAgentResource } = require('~/server/middleware');
const v1 = require('~/server/controllers/agents/v1');
const { getRoleByName } = require('~/models/Role');
@@ -4,6 +4,7 @@ const {
  registrationController,
  resetPasswordController,
  resetPasswordRequestController,
+  graphTokenController,
} = require('~/server/controllers/AuthController');
const { loginController } = require('~/server/controllers/auth/LoginController');
const { logoutController } = require('~/server/controllers/auth/LogoutController');
@@ -69,4 +70,6 @@ router.post('/2fa/confirm', requireJwtAuth, confirm2FA);
router.post('/2fa/disable', requireJwtAuth, disable2FA);
router.post('/2fa/backup/regenerate', requireJwtAuth, regenerateBackupCodes);

+router.get('/graph-token', requireJwtAuth, graphTokenController);
+
module.exports = router;
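A client exercises the new route with the usual JWT credentials; the controller's response shape is not shown in this diff, so the fields below are illustrative, and the /api/auth mount prefix is assumed from LibreChat's routing conventions:

// Hypothetical client call; response fields are illustrative.
const res = await fetch('/api/auth/graph-token', {
  headers: { Authorization: `Bearer ${jwt}` },
});
if (res.ok) {
  const data = await res.json(); // e.g. a Microsoft Graph access token payload
}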
@@ -1,10 +1,11 @@
const express = require('express');
+const { isEnabled } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { CacheKeys, defaultSocialLogins, Constants } = require('librechat-data-provider');
const { getCustomConfig } = require('~/server/services/Config/getCustomConfig');
const { getLdapConfig } = require('~/server/services/Config/ldap');
const { getProjectByName } = require('~/models/Project');
-const { isEnabled } = require('~/server/utils');
+const { getMCPManager } = require('~/config');
const { getLogStores } = require('~/cache');

const router = express.Router();
@@ -20,6 +21,9 @@ const publicSharedLinksEnabled =
  (process.env.ALLOW_SHARED_LINKS_PUBLIC === undefined ||
    isEnabled(process.env.ALLOW_SHARED_LINKS_PUBLIC));

+const sharePointFilePickerEnabled = isEnabled(process.env.ENABLE_SHAREPOINT_FILEPICKER);
+const openidReuseTokens = isEnabled(process.env.OPENID_REUSE_TOKENS);
+
router.get('/', async function (req, res) {
  const cache = getLogStores(CacheKeys.CONFIG_STORE);

@@ -97,15 +101,26 @@ router.get('/', async function (req, res) {
    instanceProjectId: instanceProject._id.toString(),
    bundlerURL: process.env.SANDPACK_BUNDLER_URL,
    staticBundlerURL: process.env.SANDPACK_STATIC_BUNDLER_URL,
+    sharePointFilePickerEnabled,
+    sharePointBaseUrl: process.env.SHAREPOINT_BASE_URL,
+    sharePointPickerGraphScope: process.env.SHAREPOINT_PICKER_GRAPH_SCOPE,
+    sharePointPickerSharePointScope: process.env.SHAREPOINT_PICKER_SHAREPOINT_SCOPE,
+    openidReuseTokens,
  };

+  payload.mcpServers = {};
  const config = await getCustomConfig();
  if (config?.mcpServers != null) {
+    const mcpManager = getMCPManager();
+    const oauthServers = mcpManager.getOAuthServers();
+
    for (const serverName in config.mcpServers) {
      const serverConfig = config.mcpServers[serverName];
      payload.mcpServers[serverName] = {
        customUserVars: serverConfig?.customUserVars || {},
        chatMenu: serverConfig?.chatMenu,
+        isOAuth: oauthServers.has(serverName),
+        startup: serverConfig?.startup,
      };
    }
  }
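After this change every configured server contributes an entry to the startup payload even before any connection is made; the shape of one entry, with illustrative values:

// Illustrative entry in payload.mcpServers after this change.
payload.mcpServers['my-server'] = {
  customUserVars: {}, // serverConfig?.customUserVars || {}
  chatMenu: true,     // serverConfig?.chatMenu
  isOAuth: false,     // true when mcpManager.getOAuthServers() contains 'my-server'
  startup: true,      // serverConfig?.startup
};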
@@ -111,7 +111,7 @@ router.delete('/', async (req, res) => {
  /** @type {{ openai: OpenAI }} */
  const { openai } = await assistantClients[endpoint].initializeClient({ req, res });
  try {
-    const response = await openai.beta.threads.del(thread_id);
+    const response = await openai.beta.threads.delete(thread_id);
    logger.debug('Deleted OpenAI thread:', response);
  } catch (error) {
    logger.error('Error deleting OpenAI thread:', error);
@@ -3,6 +3,7 @@ const express = require('express');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { resizeAvatar } = require('~/server/services/Files/images/avatar');
const { filterFile } = require('~/server/services/Files/process');
+const { getFileStrategy } = require('~/server/utils/getFileStrategy');
const { logger } = require('~/config');

const router = express.Router();
@@ -18,7 +19,7 @@ router.post('/', async (req, res) => {
    throw new Error('User ID is undefined');
  }

-  const fileStrategy = req.app.locals.fileStrategy;
+  const fileStrategy = getFileStrategy(req.app.locals, { isAvatar: true });
  const desiredFormat = req.app.locals.imageOutputType;
  const resizedBuffer = await resizeAvatar({
    userId,
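The helper centralizes what was previously a direct read of req.app.locals.fileStrategy; its real implementation lives in ~/server/utils/getFileStrategy and is not part of this hunk, so the following is only an inference from the call site:

// Inferred sketch of the helper's role; not the repository's actual code.
function getFileStrategy(appLocals, { isAvatar = false } = {}) {
  // An avatar-specific strategy, when configured, would win over the default.
  if (isAvatar && appLocals.fileConfig?.avatarStrategy) {
    return appLocals.fileConfig.avatarStrategy;
  }
  return appLocals.fileStrategy;
}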
@@ -2,9 +2,9 @@ const express = require('express');
const request = require('supertest');
const mongoose = require('mongoose');
const { v4: uuidv4 } = require('uuid');
-const { PERMISSION_BITS } = require('librechat-data-provider');
-const { MongoMemoryServer } = require('mongodb-memory-server');
const { createMethods } = require('@librechat/data-schemas');
+const { MongoMemoryServer } = require('mongodb-memory-server');
+const { AccessRoleIds, ResourceType, PrincipalType } = require('librechat-data-provider');
const { createAgent } = require('~/models/Agent');
const { createFile } = require('~/models/File');

@@ -185,11 +185,11 @@ describe('File Routes - Agent Files Endpoint', () => {
    // Grant EDIT permission to user on the agent using PermissionService
    const { grantPermission } = require('~/server/services/PermissionService');
    await grantPermission({
-      principalType: 'user',
+      principalType: PrincipalType.USER,
      principalId: otherUserId,
-      resourceType: 'agent',
+      resourceType: ResourceType.AGENT,
      resourceId: agent._id,
-      accessRoleId: 'agent_editor',
+      accessRoleId: AccessRoleIds.AGENT_EDITOR,
      grantedBy: authorId,
    });

@@ -240,11 +240,11 @@ describe('File Routes - Agent Files Endpoint', () => {
    // Grant only VIEW permission to user on the agent
    const { grantPermission } = require('~/server/services/PermissionService');
    await grantPermission({
-      principalType: 'user',
+      principalType: PrincipalType.USER,
      principalId: otherUserId,
-      resourceType: 'agent',
+      resourceType: ResourceType.AGENT,
      resourceId: agent._id,
-      accessRoleId: 'agent_viewer',
+      accessRoleId: AccessRoleIds.AGENT_VIEWER,
      grantedBy: authorId,
    });
@@ -6,8 +6,9 @@ const {
  isUUID,
  CacheKeys,
  FileSources,
-  PERMISSION_BITS,
+  ResourceType,
  EModelEndpoint,
+  PermissionBits,
  isAgentsEndpoint,
  checkOpenAIStorage,
} = require('librechat-data-provider');
@@ -17,6 +18,7 @@ const {
  processDeleteRequest,
  processAgentFileUpload,
} = require('~/server/services/Files/process');
+const { fileAccess } = require('~/server/middleware/accessResources/fileAccess');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { getOpenAIClient } = require('~/server/controllers/assistants/helpers');
const { checkPermission } = require('~/server/services/PermissionService');
@@ -24,6 +26,7 @@ const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { refreshS3FileUrls } = require('~/server/services/Files/S3/crud');
const { hasAccessToFilesViaAgent } = require('~/server/services/Files');
const { getFiles, batchUpdateFiles } = require('~/models/File');
+const { cleanFileName } = require('~/server/utils/files');
const { getAssistant } = require('~/models/Assistant');
const { getAgent } = require('~/models/Agent');
const { getLogStores } = require('~/cache');
@@ -68,22 +71,18 @@ router.get('/agent/:agent_id', async (req, res) => {
      return res.status(400).json({ error: 'Agent ID is required' });
    }

-    // Get the agent to check ownership and attached files
    const agent = await getAgent({ id: agent_id });

    if (!agent) {
-      // No agent found, return empty array
      return res.status(200).json([]);
    }

-    // Check if user has access to the agent
    if (agent.author.toString() !== userId) {
-      // Non-authors need at least EDIT permission to view agent files
      const hasEditPermission = await checkPermission({
        userId,
-        resourceType: 'agent',
+        role: req.user.role,
+        resourceType: ResourceType.AGENT,
        resourceId: agent._id,
-        requiredPermission: PERMISSION_BITS.EDIT,
+        requiredPermission: PermissionBits.EDIT,
      });

      if (!hasEditPermission) {
@@ -91,7 +90,6 @@ router.get('/agent/:agent_id', async (req, res) => {
      }
    }

-    // Collect all file IDs from agent's tool resources
    const agentFileIds = [];
    if (agent.tool_resources) {
      for (const [, resource] of Object.entries(agent.tool_resources)) {
@@ -101,12 +99,10 @@ router.get('/agent/:agent_id', async (req, res) => {
      }
    }

-    // If no files attached to agent, return empty array
    if (agentFileIds.length === 0) {
      return res.status(200).json([]);
    }

-    // Get only the files attached to this agent
    const files = await getFiles({ file_id: { $in: agentFileIds } }, null, { text: 0 });

    res.status(200).json(files);
@@ -155,18 +151,15 @@ router.delete('/', async (req, res) => {

    const ownedFiles = [];
    const nonOwnedFiles = [];
-    const fileMap = new Map();

    for (const file of dbFiles) {
-      fileMap.set(file.file_id, file);
-      if (file.user.toString() === req.user.id) {
+      if (file.user.toString() === req.user.id.toString()) {
        ownedFiles.push(file);
      } else {
        nonOwnedFiles.push(file);
      }
    }

-    // If all files are owned by the user, no need for further checks
    if (nonOwnedFiles.length === 0) {
      await processDeleteRequest({ req, files: ownedFiles });
      logger.debug(
@@ -179,20 +172,18 @@ router.delete('/', async (req, res) => {
      return;
    }

-    // Check access for non-owned files
    let authorizedFiles = [...ownedFiles];
    let unauthorizedFiles = [];

    if (req.body.agent_id && nonOwnedFiles.length > 0) {
-      // Batch check access for all non-owned files
      const nonOwnedFileIds = nonOwnedFiles.map((f) => f.file_id);
-      const accessMap = await hasAccessToFilesViaAgent(
-        req.user.id,
-        nonOwnedFileIds,
-        req.body.agent_id,
-      );
+      const accessMap = await hasAccessToFilesViaAgent({
+        userId: req.user.id,
+        role: req.user.role,
+        fileIds: nonOwnedFileIds,
+        agentId: req.body.agent_id,
+      });

      // Separate authorized and unauthorized files
      for (const file of nonOwnedFiles) {
        if (accessMap.get(file.file_id)) {
          authorizedFiles.push(file);
@@ -201,7 +192,6 @@ router.delete('/', async (req, res) => {
        }
      }
    } else {
-      // No agent context, all non-owned files are unauthorized
      unauthorizedFiles = nonOwnedFiles;
    }

@@ -305,42 +295,30 @@ router.get('/code/download/:session_id/:fileId', async (req, res) => {
  }
});

-router.get('/download/:userId/:file_id', async (req, res) => {
+router.get('/download/:userId/:file_id', fileAccess, async (req, res) => {
  try {
    const { userId, file_id } = req.params;
    logger.debug(`File download requested by user ${userId}: ${file_id}`);

-    if (userId !== req.user.id) {
-      logger.warn(`${errorPrefix} forbidden: ${file_id}`);
-      return res.status(403).send('Forbidden');
-    }
-
-    const [file] = await getFiles({ file_id });
-    const errorPrefix = `File download requested by user ${userId}`;
-
-    if (!file) {
-      logger.warn(`${errorPrefix} not found: ${file_id}`);
-      return res.status(404).send('File not found');
-    }
-
-    if (!file.filepath.includes(userId)) {
-      logger.warn(`${errorPrefix} forbidden: ${file_id}`);
-      return res.status(403).send('Forbidden');
-    }
+    // Access already validated by fileAccess middleware
+    const file = req.fileAccess.file;

    if (checkOpenAIStorage(file.source) && !file.model) {
-      logger.warn(`${errorPrefix} has no associated model: ${file_id}`);
+      logger.warn(`File download requested by user ${userId} has no associated model: ${file_id}`);
      return res.status(400).send('The model used when creating this file is not available');
    }

    const { getDownloadStream } = getStrategyFunctions(file.source);
    if (!getDownloadStream) {
-      logger.warn(`${errorPrefix} has no stream method implemented: ${file.source}`);
+      logger.warn(
+        `File download requested by user ${userId} has no stream method implemented: ${file.source}`,
+      );
      return res.status(501).send('Not Implemented');
    }

    const setHeaders = () => {
-      res.setHeader('Content-Disposition', `attachment; filename="${file.filename}"`);
+      const cleanedFilename = cleanFileName(file.filename);
+      res.setHeader('Content-Disposition', `attachment; filename="${cleanedFilename}"`);
      res.setHeader('Content-Type', 'application/octet-stream');
      res.setHeader('X-File-Metadata', JSON.stringify(file));
    };
@@ -367,12 +345,17 @@ router.get('/download/:userId/:file_id', async (req, res) => {
      logger.debug(`File ${file_id} downloaded from OpenAI`);
      passThrough.body.pipe(res);
    } else {
-      fileStream = getDownloadStream(file_id);
+      fileStream = await getDownloadStream(req, file.filepath);
+
+      fileStream.on('error', (streamError) => {
+        logger.error('[DOWNLOAD ROUTE] Stream error:', streamError);
+      });
+
      setHeaders();
      fileStream.pipe(res);
    }
  } catch (error) {
-    logger.error('Error downloading file:', error);
+    logger.error('[DOWNLOAD ROUTE] Error downloading file:', error);
    res.status(500).send('Error downloading file');
  }
});
@@ -407,7 +390,6 @@ router.post('/', async (req, res) => {
      message = error.message;
    }

-    // TODO: delete remote file if it exists
    try {
      await fs.unlink(req.file.path);
      cleanup = false;
@@ -415,13 +397,15 @@ router.post('/', async (req, res) => {
      logger.error('[/files] Error deleting file:', error);
    }
    res.status(500).json({ message });
-  }
-
-  if (cleanup) {
-    try {
-      await fs.unlink(req.file.path);
-    } catch (error) {
-      logger.error('[/files] Error deleting file after file processing:', error);
+  } finally {
+    if (cleanup) {
+      try {
+        await fs.unlink(req.file.path);
+      } catch (error) {
+        logger.error('[/files] Error deleting file after file processing:', error);
+      }
+    } else {
+      logger.debug('[/files] File processing completed without cleanup');
    }
  }
});
@@ -4,6 +4,12 @@ const mongoose = require('mongoose');
const { v4: uuidv4 } = require('uuid');
const { createMethods } = require('@librechat/data-schemas');
const { MongoMemoryServer } = require('mongodb-memory-server');
+const {
+  SystemRoles,
+  ResourceType,
+  AccessRoleIds,
+  PrincipalType,
+} = require('librechat-data-provider');
const { createAgent } = require('~/models/Agent');
const { createFile } = require('~/models/File');

@@ -61,11 +67,8 @@ describe('File Routes - Delete with Agent Access', () => {
  let Agent;
  let AclEntry;
  let User;
-  let AccessRole;
  let methods;
  let modelsToCleanup = [];
-  // eslint-disable-next-line no-unused-vars
-  let agentId;

  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
@@ -90,7 +93,6 @@ describe('File Routes - Delete with Agent Access', () => {
    Agent = models.Agent;
    AclEntry = models.AclEntry;
    User = models.User;
-    AccessRole = models.AccessRole;

    // Seed default roles using our methods
    await methods.seedDefaultRoles();
@@ -98,9 +100,11 @@ describe('File Routes - Delete with Agent Access', () => {
    app = express();
    app.use(express.json());

-    // Mock authentication middleware
    app.use((req, res, next) => {
-      req.user = { id: otherUserId ? otherUserId.toString() : 'default-user' };
+      req.user = {
+        id: otherUserId || 'default-user',
+        role: SystemRoles.USER,
+      };
      req.app = { locals: {} };
      next();
    });
@@ -139,7 +143,6 @@ describe('File Routes - Delete with Agent Access', () => {
    // Create test data
    authorId = new mongoose.Types.ObjectId();
    otherUserId = new mongoose.Types.ObjectId();
-    agentId = uuidv4();
    fileId = uuidv4();

    // Create users in database
@@ -231,11 +234,11 @@ describe('File Routes - Delete with Agent Access', () => {
    // Grant EDIT permission to user on the agent
    const { grantPermission } = require('~/server/services/PermissionService');
    await grantPermission({
-      principalType: 'user',
+      principalType: PrincipalType.USER,
      principalId: otherUserId,
-      resourceType: 'agent',
+      resourceType: ResourceType.AGENT,
      resourceId: agent._id,
-      accessRoleId: 'agent_editor',
+      accessRoleId: AccessRoleIds.AGENT_EDITOR,
      grantedBy: authorId,
    });

@@ -285,11 +288,11 @@ describe('File Routes - Delete with Agent Access', () => {
    // Grant EDIT permission to user on the agent
    const { grantPermission } = require('~/server/services/PermissionService');
    await grantPermission({
-      principalType: 'user',
+      principalType: PrincipalType.USER,
      principalId: otherUserId,
-      resourceType: 'agent',
+      resourceType: ResourceType.AGENT,
      resourceId: agent._id,
-      accessRoleId: 'agent_editor',
+      accessRoleId: AccessRoleIds.AGENT_EDITOR,
      grantedBy: authorId,
    });

@@ -351,11 +354,11 @@ describe('File Routes - Delete with Agent Access', () => {
    // Grant EDIT permission to user on the agent
    const { grantPermission } = require('~/server/services/PermissionService');
    await grantPermission({
-      principalType: 'user',
+      principalType: PrincipalType.USER,
      principalId: otherUserId,
-      resourceType: 'agent',
+      resourceType: ResourceType.AGENT,
      resourceId: agent._id,
-      accessRoleId: 'agent_editor',
+      accessRoleId: AccessRoleIds.AGENT_EDITOR,
      grantedBy: authorId,
    });

@@ -394,11 +397,11 @@ describe('File Routes - Delete with Agent Access', () => {
    // Grant only VIEW permission to user on the agent
    const { grantPermission } = require('~/server/services/PermissionService');
    await grantPermission({
-      principalType: 'user',
+      principalType: PrincipalType.USER,
      principalId: otherUserId,
-      resourceType: 'agent',
+      resourceType: ResourceType.AGENT,
      resourceId: agent._id,
-      accessRoleId: 'agent_viewer',
+      accessRoleId: AccessRoleIds.AGENT_VIEWER,
      grantedBy: authorId,
    });
@@ -1,9 +1,13 @@
const { Router } = require('express');
+const { MCPOAuthHandler } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
-const { CacheKeys } = require('librechat-data-provider');
-const { MCPOAuthHandler } = require('@librechat/api');
+const { CacheKeys, Constants } = require('librechat-data-provider');
+const { findToken, updateToken, createToken, deleteTokens } = require('~/models');
+const { setCachedTools, getCachedTools, loadCustomConfig } = require('~/server/services/Config');
+const { getMCPSetupData, getServerConnectionStatus } = require('~/server/services/MCP');
+const { getUserPluginAuthValue } = require('~/server/services/PluginService');
+const { getMCPManager, getFlowStateManager } = require('~/config');
const { requireJwtAuth } = require('~/server/middleware');
-const { getFlowStateManager } = require('~/config');
const { getLogStores } = require('~/cache');

const router = Router();
@@ -89,7 +93,6 @@ router.get('/:serverName/oauth/callback', async (req, res) => {
      return res.redirect('/oauth/error?error=missing_state');
    }

-    // Extract flow ID from state
    const flowId = state;
    logger.debug('[MCP OAuth] Using flow ID from state', { flowId });

@@ -112,14 +115,68 @@ router.get('/:serverName/oauth/callback', async (req, res) => {
      hasCodeVerifier: !!flowState.codeVerifier,
    });

    // Complete the OAuth flow
    logger.debug('[MCP OAuth] Completing OAuth flow');
    const tokens = await MCPOAuthHandler.completeOAuthFlow(flowId, code, flowManager);
    logger.info('[MCP OAuth] OAuth flow completed, tokens received in callback route');

-    // For system-level OAuth, we need to store the tokens and retry the connection
-    if (flowState.userId === 'system') {
-      logger.debug(`[MCP OAuth] System-level OAuth completed for ${serverName}`);
+    try {
+      const mcpManager = getMCPManager(flowState.userId);
+      logger.debug(`[MCP OAuth] Attempting to reconnect ${serverName} with new OAuth tokens`);
+
+      if (flowState.userId !== 'system') {
+        const user = { id: flowState.userId };
+
+        const userConnection = await mcpManager.getUserConnection({
+          user,
+          serverName,
+          flowManager,
+          tokenMethods: {
+            findToken,
+            updateToken,
+            createToken,
+            deleteTokens,
+          },
+        });
+
+        logger.info(
+          `[MCP OAuth] Successfully reconnected ${serverName} for user ${flowState.userId}`,
+        );
+
+        const userTools = (await getCachedTools({ userId: flowState.userId })) || {};
+
+        const mcpDelimiter = Constants.mcp_delimiter;
+        for (const key of Object.keys(userTools)) {
+          if (key.endsWith(`${mcpDelimiter}${serverName}`)) {
+            delete userTools[key];
+          }
+        }
+
+        const tools = await userConnection.fetchTools();
+        for (const tool of tools) {
+          const name = `${tool.name}${Constants.mcp_delimiter}${serverName}`;
+          userTools[name] = {
+            type: 'function',
+            ['function']: {
+              name,
+              description: tool.description,
+              parameters: tool.inputSchema,
+            },
+          };
+        }
+
+        await setCachedTools(userTools, { userId: flowState.userId });
+
+        logger.debug(
+          `[MCP OAuth] Cached ${tools.length} tools for ${serverName} user ${flowState.userId}`,
+        );
+      } else {
+        logger.debug(`[MCP OAuth] System-level OAuth completed for ${serverName}`);
+      }
+    } catch (error) {
+      logger.warn(
+        `[MCP OAuth] Failed to reconnect ${serverName} after OAuth, but tokens are saved:`,
+        error,
+      );
+    }

    /** ID of the flow that the tool/connection is waiting for */
@@ -151,7 +208,6 @@ router.get('/oauth/tokens/:flowId', requireJwtAuth, async (req, res) => {
      return res.status(401).json({ error: 'User not authenticated' });
    }

-    // Allow system flows or user-owned flows
    if (!flowId.startsWith(`${user.id}:`) && !flowId.startsWith('system:')) {
      return res.status(403).json({ error: 'Access denied' });
    }
@@ -202,4 +258,338 @@ router.get('/oauth/status/:flowId', async (req, res) => {
  }
});

/**
 * Cancel OAuth flow
 * This endpoint cancels a pending OAuth flow
 */
router.post('/oauth/cancel/:serverName', requireJwtAuth, async (req, res) => {
  try {
    const { serverName } = req.params;
    const user = req.user;

    if (!user?.id) {
      return res.status(401).json({ error: 'User not authenticated' });
    }

    logger.info(`[MCP OAuth Cancel] Cancelling OAuth flow for ${serverName} by user ${user.id}`);

    const flowsCache = getLogStores(CacheKeys.FLOWS);
    const flowManager = getFlowStateManager(flowsCache);
    const flowId = MCPOAuthHandler.generateFlowId(user.id, serverName);
    const flowState = await flowManager.getFlowState(flowId, 'mcp_oauth');

    if (!flowState) {
      logger.debug(`[MCP OAuth Cancel] No active flow found for ${serverName}`);
      return res.json({
        success: true,
        message: 'No active OAuth flow to cancel',
      });
    }

    await flowManager.failFlow(flowId, 'mcp_oauth', 'User cancelled OAuth flow');

    logger.info(`[MCP OAuth Cancel] Successfully cancelled OAuth flow for ${serverName}`);

    res.json({
      success: true,
      message: `OAuth flow for ${serverName} cancelled successfully`,
    });
  } catch (error) {
    logger.error('[MCP OAuth Cancel] Failed to cancel OAuth flow', error);
    res.status(500).json({ error: 'Failed to cancel OAuth flow' });
  }
});

/**
 * Reinitialize MCP server
 * This endpoint allows reinitializing a specific MCP server
 */
router.post('/:serverName/reinitialize', requireJwtAuth, async (req, res) => {
  try {
    const { serverName } = req.params;
    const user = req.user;

    if (!user?.id) {
      return res.status(401).json({ error: 'User not authenticated' });
    }

    logger.info(`[MCP Reinitialize] Reinitializing server: ${serverName}`);

    const printConfig = false;
    const config = await loadCustomConfig(printConfig);
    if (!config || !config.mcpServers || !config.mcpServers[serverName]) {
      return res.status(404).json({
        error: `MCP server '${serverName}' not found in configuration`,
      });
    }

    const flowsCache = getLogStores(CacheKeys.FLOWS);
    const flowManager = getFlowStateManager(flowsCache);
    const mcpManager = getMCPManager();

    await mcpManager.disconnectServer(serverName);
    logger.info(`[MCP Reinitialize] Disconnected existing server: ${serverName}`);

    const serverConfig = config.mcpServers[serverName];
    mcpManager.mcpConfigs[serverName] = serverConfig;
    let customUserVars = {};
    if (serverConfig.customUserVars && typeof serverConfig.customUserVars === 'object') {
      for (const varName of Object.keys(serverConfig.customUserVars)) {
        try {
          const value = await getUserPluginAuthValue(user.id, varName, false);
          customUserVars[varName] = value;
        } catch (err) {
          logger.error(`[MCP Reinitialize] Error fetching ${varName} for user ${user.id}:`, err);
        }
      }
    }

    let userConnection = null;
    let oauthRequired = false;
    let oauthUrl = null;

    try {
      userConnection = await mcpManager.getUserConnection({
        user,
        serverName,
        flowManager,
        customUserVars,
        tokenMethods: {
          findToken,
          updateToken,
          createToken,
          deleteTokens,
        },
        returnOnOAuth: true,
        oauthStart: async (authURL) => {
          logger.info(`[MCP Reinitialize] OAuth URL received: ${authURL}`);
          oauthUrl = authURL;
          oauthRequired = true;
        },
      });

      logger.info(`[MCP Reinitialize] Successfully established connection for ${serverName}`);
    } catch (err) {
      logger.info(`[MCP Reinitialize] getUserConnection threw error: ${err.message}`);
      logger.info(
        `[MCP Reinitialize] OAuth state - oauthRequired: ${oauthRequired}, oauthUrl: ${oauthUrl ? 'present' : 'null'}`,
      );

      const isOAuthError =
        err.message?.includes('OAuth') ||
        err.message?.includes('authentication') ||
        err.message?.includes('401');

      const isOAuthFlowInitiated = err.message === 'OAuth flow initiated - return early';

      if (isOAuthError || oauthRequired || isOAuthFlowInitiated) {
        logger.info(
          `[MCP Reinitialize] OAuth required for ${serverName} (isOAuthError: ${isOAuthError}, oauthRequired: ${oauthRequired}, isOAuthFlowInitiated: ${isOAuthFlowInitiated})`,
        );
        oauthRequired = true;
      } else {
        logger.error(
          `[MCP Reinitialize] Error initializing MCP server ${serverName} for user:`,
          err,
        );
        return res.status(500).json({ error: 'Failed to reinitialize MCP server for user' });
      }
    }

    if (userConnection && !oauthRequired) {
      const userTools = (await getCachedTools({ userId: user.id })) || {};

      const mcpDelimiter = Constants.mcp_delimiter;
      for (const key of Object.keys(userTools)) {
        if (key.endsWith(`${mcpDelimiter}${serverName}`)) {
          delete userTools[key];
        }
      }

      const tools = await userConnection.fetchTools();
      for (const tool of tools) {
        const name = `${tool.name}${Constants.mcp_delimiter}${serverName}`;
        userTools[name] = {
          type: 'function',
          ['function']: {
            name,
            description: tool.description,
            parameters: tool.inputSchema,
          },
        };
      }

      await setCachedTools(userTools, { userId: user.id });
    }

    logger.debug(
      `[MCP Reinitialize] Sending response for ${serverName} - oauthRequired: ${oauthRequired}, oauthUrl: ${oauthUrl ? 'present' : 'null'}`,
    );

    const getResponseMessage = () => {
      if (oauthRequired) {
        return `MCP server '${serverName}' ready for OAuth authentication`;
      }
      if (userConnection) {
        return `MCP server '${serverName}' reinitialized successfully`;
      }
      return `Failed to reinitialize MCP server '${serverName}'`;
    };

    res.json({
      success: (userConnection && !oauthRequired) || (oauthRequired && oauthUrl),
      message: getResponseMessage(),
      serverName,
      oauthRequired,
      oauthUrl,
    });
  } catch (error) {
    logger.error('[MCP Reinitialize] Unexpected error', error);
    res.status(500).json({ error: 'Internal server error' });
  }
});

/**
 * Get connection status for all MCP servers
 * This endpoint returns all app level and user-scoped connection statuses from MCPManager without disconnecting idle connections
 */
router.get('/connection/status', requireJwtAuth, async (req, res) => {
  try {
    const user = req.user;

    if (!user?.id) {
      return res.status(401).json({ error: 'User not authenticated' });
    }

    const { mcpConfig, appConnections, userConnections, oauthServers } = await getMCPSetupData(
      user.id,
    );
    const connectionStatus = {};

    for (const [serverName] of Object.entries(mcpConfig)) {
      connectionStatus[serverName] = await getServerConnectionStatus(
        user.id,
        serverName,
        appConnections,
        userConnections,
        oauthServers,
      );
    }

    res.json({
      success: true,
      connectionStatus,
    });
  } catch (error) {
    if (error.message === 'MCP config not found') {
      return res.status(404).json({ error: error.message });
    }
    logger.error('[MCP Connection Status] Failed to get connection status', error);
    res.status(500).json({ error: 'Failed to get connection status' });
  }
});

/**
 * Get connection status for a single MCP server
 * This endpoint returns the connection status for a specific server for a given user
 */
router.get('/connection/status/:serverName', requireJwtAuth, async (req, res) => {
  try {
    const user = req.user;
    const { serverName } = req.params;

    if (!user?.id) {
      return res.status(401).json({ error: 'User not authenticated' });
    }

    const { mcpConfig, appConnections, userConnections, oauthServers } = await getMCPSetupData(
      user.id,
    );

    if (!mcpConfig[serverName]) {
      return res
        .status(404)
        .json({ error: `MCP server '${serverName}' not found in configuration` });
    }

    const serverStatus = await getServerConnectionStatus(
      user.id,
      serverName,
      appConnections,
      userConnections,
      oauthServers,
    );

    res.json({
      success: true,
      serverName,
      connectionStatus: serverStatus.connectionState,
      requiresOAuth: serverStatus.requiresOAuth,
    });
  } catch (error) {
    if (error.message === 'MCP config not found') {
      return res.status(404).json({ error: error.message });
    }
    logger.error(
      `[MCP Per-Server Status] Failed to get connection status for ${req.params.serverName}`,
      error,
    );
    res.status(500).json({ error: 'Failed to get connection status' });
  }
});

/**
 * Check which authentication values exist for a specific MCP server
 * This endpoint returns only boolean flags indicating if values are set, not the actual values
 */
router.get('/:serverName/auth-values', requireJwtAuth, async (req, res) => {
  try {
    const { serverName } = req.params;
    const user = req.user;

    if (!user?.id) {
      return res.status(401).json({ error: 'User not authenticated' });
    }

    const printConfig = false;
    const config = await loadCustomConfig(printConfig);
    if (!config || !config.mcpServers || !config.mcpServers[serverName]) {
      return res.status(404).json({
        error: `MCP server '${serverName}' not found in configuration`,
      });
    }

    const serverConfig = config.mcpServers[serverName];
    const pluginKey = `${Constants.mcp_prefix}${serverName}`;
    const authValueFlags = {};

    if (serverConfig.customUserVars && typeof serverConfig.customUserVars === 'object') {
      for (const varName of Object.keys(serverConfig.customUserVars)) {
        try {
          const value = await getUserPluginAuthValue(user.id, varName, false, pluginKey);
          authValueFlags[varName] = !!(value && value.length > 0);
        } catch (err) {
          logger.error(
            `[MCP Auth Value Flags] Error checking ${varName} for user ${user.id}:`,
            err,
          );
          authValueFlags[varName] = false;
        }
      }
    }

    res.json({
      success: true,
      serverName,
      authValueFlags,
    });
  } catch (error) {
    logger.error(
      `[MCP Auth Value Flags] Failed to check auth value flags for ${req.params.serverName}`,
      error,
    );
    res.status(500).json({ error: 'Failed to check auth value flags' });
  }
});

module.exports = router;
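Callers of the reinitialize endpoint have to branch on oauthRequired/oauthUrl in the response; a hedged client-side sketch (the /api/mcp mount prefix is assumed, and credentials ride on the JWT cookie/header as usual):

// Hypothetical client handling for POST /api/mcp/:serverName/reinitialize.
const res = await fetch(`/api/mcp/${serverName}/reinitialize`, { method: 'POST' });
const data = await res.json();
if (data.oauthRequired && data.oauthUrl) {
  // The user finishes OAuth in a new tab; the callback route reconnects and caches tools.
  window.open(data.oauthUrl, '_blank');
} else if (data.success) {
  console.log(data.message); // "MCP server '<name>' reinitialized successfully"
}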
@@ -13,6 +13,8 @@ const { getRoleByName } = require('~/models/Role');

const router = express.Router();

const memoryPayloadLimit = express.json({ limit: '100kb' });

const checkMemoryRead = generateCheckAccess({
  permissionType: PermissionTypes.MEMORIES,
  permissions: [Permissions.USE, Permissions.READ],
@@ -60,6 +62,7 @@ router.get('/', checkMemoryRead, async (req, res) => {

    const memoryConfig = req.app.locals?.memory;
    const tokenLimit = memoryConfig?.tokenLimit;
    const charLimit = memoryConfig?.charLimit || 10000;

    let usagePercentage = null;
    if (tokenLimit && tokenLimit > 0) {
@@ -70,6 +73,7 @@ router.get('/', checkMemoryRead, async (req, res) => {
      memories: sortedMemories,
      totalTokens,
      tokenLimit: tokenLimit || null,
      charLimit,
      usagePercentage,
    });
  } catch (error) {
@@ -83,7 +87,7 @@ router.get('/', checkMemoryRead, async (req, res) => {
 * Body: { key: string, value: string }
 * Returns 201 and { created: true, memory: <createdDoc> } when successful.
 */
router.post('/', checkMemoryCreate, async (req, res) => {
router.post('/', memoryPayloadLimit, checkMemoryCreate, async (req, res) => {
  const { key, value } = req.body;

  if (typeof key !== 'string' || key.trim() === '') {
@@ -94,13 +98,25 @@ router.post('/', checkMemoryCreate, async (req, res) => {
    return res.status(400).json({ error: 'Value is required and must be a non-empty string.' });
  }

  const memoryConfig = req.app.locals?.memory;
  const charLimit = memoryConfig?.charLimit || 10000;

  if (key.length > 1000) {
    return res.status(400).json({
      error: `Key exceeds maximum length of 1000 characters. Current length: ${key.length} characters.`,
    });
  }

  if (value.length > charLimit) {
    return res.status(400).json({
      error: `Value exceeds maximum length of ${charLimit} characters. Current length: ${value.length} characters.`,
    });
  }

  try {
    const tokenCount = Tokenizer.getTokenCount(value, 'o200k_base');

    const memories = await getAllUserMemories(req.user.id);

    // Check token limit
    const memoryConfig = req.app.locals?.memory;
    const tokenLimit = memoryConfig?.tokenLimit;

    if (tokenLimit) {
@@ -175,7 +191,7 @@ router.patch('/preferences', checkMemoryOptOut, async (req, res) => {
 * Body: { key?: string, value: string }
 * Returns 200 and { updated: true, memory: <updatedDoc> } when successful.
 */
router.patch('/:key', checkMemoryUpdate, async (req, res) => {
router.patch('/:key', memoryPayloadLimit, checkMemoryUpdate, async (req, res) => {
  const { key: urlKey } = req.params;
  const { key: bodyKey, value } = req.body || {};

@@ -183,9 +199,23 @@ router.patch('/:key', checkMemoryUpdate, async (req, res) => {
    return res.status(400).json({ error: 'Value is required and must be a non-empty string.' });
  }

  // Use the key from the body if provided, otherwise use the key from the URL
  const newKey = bodyKey || urlKey;

  const memoryConfig = req.app.locals?.memory;
  const charLimit = memoryConfig?.charLimit || 10000;

  if (newKey.length > 1000) {
    return res.status(400).json({
      error: `Key exceeds maximum length of 1000 characters. Current length: ${newKey.length} characters.`,
    });
  }

  if (value.length > charLimit) {
    return res.status(400).json({
      error: `Value exceeds maximum length of ${charLimit} characters. Current length: ${value.length} characters.`,
    });
  }

  try {
    const tokenCount = Tokenizer.getTokenCount(value, 'o200k_base');

@@ -196,7 +226,6 @@ router.patch('/:key', checkMemoryUpdate, async (req, res) => {
      return res.status(404).json({ error: 'Memory not found.' });
    }

    // If the key is changing, we need to handle it specially
    if (newKey !== urlKey) {
      const keyExists = memories.find((m) => m.key === newKey);
      if (keyExists) {
@@ -219,7 +248,6 @@ router.patch('/:key', checkMemoryUpdate, async (req, res) => {
        return res.status(500).json({ error: 'Failed to delete old memory.' });
      }
    } else {
      // Key is not changing, just update the value
      const result = await setMemory({
        userId: req.user.id,
        key: newKey,

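The new memoryPayloadLimit middleware caps POST and PATCH bodies at 100kb, and the key/value length checks now reject oversized input with a 400 before any token counting happens. A quick sketch of what a client sees when it exceeds the value limit; the /api/memories mount path is an assumption for the example:

// Sketch: a value longer than charLimit (default 10000 chars) is rejected up front.
async function createMemory(key, value) {
  const res = await fetch('/api/memories', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    credentials: 'include',
    body: JSON.stringify({ key, value }),
  });
  if (res.status === 400) {
    const { error } = await res.json();
    console.warn(error); // e.g. "Value exceeds maximum length of 10000 characters. ..."
    return null;
  }
  return res.json(); // 201 with { created: true, memory: ... } on success, per the docblock above
}
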
@@ -1,7 +1,10 @@
// file deepcode ignore NoRateLimitingForLogin: Rate limiting is handled by the `loginLimiter` middleware
const express = require('express');
const passport = require('passport');
const { isEnabled } = require('@librechat/api');
const { randomState } = require('openid-client');
const { logger } = require('@librechat/data-schemas');
const { ErrorTypes } = require('librechat-data-provider');
const {
  checkBan,
  logHeaders,
@@ -11,8 +14,6 @@ const {
} = require('~/server/middleware');
const { syncUserEntraGroupMemberships } = require('~/server/services/PermissionService');
const { setAuthTokens, setOpenIDAuthTokens } = require('~/server/services/AuthService');
const { isEnabled } = require('~/server/utils');
const { logger } = require('~/config');

const router = express.Router();

@@ -48,13 +49,13 @@ const oauthHandler = async (req, res) => {
};

router.get('/error', (req, res) => {
  // A single error message is pushed by passport when authentication fails.
  /** A single error message is pushed by passport when authentication fails. */
  const errorMessage = req.session?.messages?.pop() || 'Unknown error';
  logger.error('Error in OAuth authentication:', {
    message: req.session?.messages?.pop() || 'Unknown error',
    message: errorMessage,
  });

  // Redirect to login page with auth_failed parameter to prevent infinite redirect loops
  res.redirect(`${domains.client}/login?redirect=false`);
  res.redirect(`${domains.client}/login?redirect=false&error=${ErrorTypes.AUTH_FAILED}`);
});

/**

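The error handler now forwards a machine-readable code to the client via the query string. A sketch of how a login page might consume it; the `error` parameter name comes from the diff, while `showBanner` is a hypothetical UI helper and the message wording is an assumption:

// Sketch: surface the OAuth failure on the login page.
const params = new URLSearchParams(window.location.search);
if (params.get('error')) {
  // ErrorTypes.AUTH_FAILED is an enum value from librechat-data-provider;
  // its exact string is not shown in this diff.
  showBanner(`Authentication failed (${params.get('error')}). Please try again.`);
}
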
@@ -1,22 +1,45 @@
const express = require('express');
const { logger } = require('@librechat/data-schemas');
const { generateCheckAccess } = require('@librechat/api');
const { Permissions, SystemRoles, PermissionTypes } = require('librechat-data-provider');
const {
  getPrompt,
  getPrompts,
  savePrompt,
  deletePrompt,
  getPromptGroup,
  getPromptGroups,
  generateCheckAccess,
  markPublicPromptGroups,
  buildPromptGroupFilter,
  formatPromptGroupsResponse,
  createEmptyPromptGroupsResponse,
  filterAccessibleIdsBySharedLogic,
} = require('@librechat/api');
const {
  Permissions,
  SystemRoles,
  ResourceType,
  AccessRoleIds,
  PrincipalType,
  PermissionBits,
  PermissionTypes,
} = require('librechat-data-provider');
const {
  getListPromptGroupsByAccess,
  makePromptProduction,
  updatePromptGroup,
  deletePromptGroup,
  createPromptGroup,
  getAllPromptGroups,
  // updatePromptLabels,
  makePromptProduction,
  getPromptGroup,
  deletePrompt,
  getPrompts,
  savePrompt,
  getPrompt,
} = require('~/models/Prompt');
const { requireJwtAuth } = require('~/server/middleware');
const {
  canAccessPromptGroupResource,
  canAccessPromptViaGroup,
  requireJwtAuth,
} = require('~/server/middleware');
const {
  findPubliclyAccessibleResources,
  getEffectivePermissions,
  findAccessibleResources,
  grantPermission,
} = require('~/server/services/PermissionService');
const { getRoleByName } = require('~/models/Role');

const router = express.Router();
@@ -48,43 +71,78 @@ router.use(checkPromptAccess);
 * Route to get single prompt group by its ID
 * GET /groups/:groupId
 */
router.get('/groups/:groupId', async (req, res) => {
  let groupId = req.params.groupId;
  const author = req.user.id;
router.get(
  '/groups/:groupId',
  canAccessPromptGroupResource({
    requiredPermission: PermissionBits.VIEW,
  }),
  async (req, res) => {
    const { groupId } = req.params;

  const query = {
    _id: groupId,
    $or: [{ projectIds: { $exists: true, $ne: [], $not: { $size: 0 } } }, { author }],
  };
    try {
      const group = await getPromptGroup({ _id: groupId });

  if (req.user.role === SystemRoles.ADMIN) {
    delete query.$or;
  }
      if (!group) {
        return res.status(404).send({ message: 'Prompt group not found' });
      }

  try {
    const group = await getPromptGroup(query);

    if (!group) {
      return res.status(404).send({ message: 'Prompt group not found' });
      res.status(200).send(group);
    } catch (error) {
      logger.error('Error getting prompt group', error);
      res.status(500).send({ message: 'Error getting prompt group' });
    }

    res.status(200).send(group);
  } catch (error) {
    logger.error('Error getting prompt group', error);
    res.status(500).send({ message: 'Error getting prompt group' });
  }
});
  },
);

/**
 * Route to fetch all prompt groups
 * GET /groups
 * Route to fetch all prompt groups (ACL-aware)
 * GET /all
 */
router.get('/all', async (req, res) => {
  try {
    const groups = await getAllPromptGroups(req, {
      author: req.user._id,
    const userId = req.user.id;
    const { name, category, ...otherFilters } = req.query;
    const { filter, searchShared, searchSharedOnly } = buildPromptGroupFilter({
      name,
      category,
      ...otherFilters,
    });
    res.status(200).send(groups);

    let accessibleIds = await findAccessibleResources({
      userId,
      role: req.user.role,
      resourceType: ResourceType.PROMPTGROUP,
      requiredPermissions: PermissionBits.VIEW,
    });

    const publiclyAccessibleIds = await findPubliclyAccessibleResources({
      resourceType: ResourceType.PROMPTGROUP,
      requiredPermissions: PermissionBits.VIEW,
    });

    const filteredAccessibleIds = await filterAccessibleIdsBySharedLogic({
      accessibleIds,
      searchShared,
      searchSharedOnly,
      publicPromptGroupIds: publiclyAccessibleIds,
    });

    const result = await getListPromptGroupsByAccess({
      accessibleIds: filteredAccessibleIds,
      otherParams: filter,
    });

    if (!result) {
      return res.status(200).send([]);
    }

    const { data: promptGroups = [] } = result;
    if (!promptGroups.length) {
      return res.status(200).send([]);
    }

    const groupsWithPublicFlag = markPublicPromptGroups(promptGroups, publiclyAccessibleIds);
    res.status(200).send(groupsWithPublicFlag);
  } catch (error) {
    logger.error(error);
    res.status(500).send({ error: 'Error getting prompt groups' });
@@ -92,16 +150,72 @@ router.get('/all', async (req, res) => {
});

/**
 * Route to fetch paginated prompt groups with filters
 * Route to fetch paginated prompt groups with filters (ACL-aware)
 * GET /groups
 */
router.get('/groups', async (req, res) => {
  try {
    const filter = req.query;
    /* Note: The aggregation requires an ObjectId */
    filter.author = req.user._id;
    const groups = await getPromptGroups(req, filter);
    res.status(200).send(groups);
    const userId = req.user.id;
    const { pageSize, pageNumber, limit, cursor, name, category, ...otherFilters } = req.query;

    const { filter, searchShared, searchSharedOnly } = buildPromptGroupFilter({
      name,
      category,
      ...otherFilters,
    });

    let actualLimit = limit;
    let actualCursor = cursor;

    if (pageSize && !limit) {
      actualLimit = parseInt(pageSize, 10);
    }

    let accessibleIds = await findAccessibleResources({
      userId,
      role: req.user.role,
      resourceType: ResourceType.PROMPTGROUP,
      requiredPermissions: PermissionBits.VIEW,
    });

    const publiclyAccessibleIds = await findPubliclyAccessibleResources({
      resourceType: ResourceType.PROMPTGROUP,
      requiredPermissions: PermissionBits.VIEW,
    });

    const filteredAccessibleIds = await filterAccessibleIdsBySharedLogic({
      accessibleIds,
      searchShared,
      searchSharedOnly,
      publicPromptGroupIds: publiclyAccessibleIds,
    });

    const result = await getListPromptGroupsByAccess({
      accessibleIds: filteredAccessibleIds,
      otherParams: filter,
      limit: actualLimit,
      after: actualCursor,
    });

    if (!result) {
      const emptyResponse = createEmptyPromptGroupsResponse({ pageNumber, pageSize, actualLimit });
      return res.status(200).send(emptyResponse);
    }

    const { data: promptGroups = [], has_more = false, after = null } = result;

    const groupsWithPublicFlag = markPublicPromptGroups(promptGroups, publiclyAccessibleIds);

    const response = formatPromptGroupsResponse({
      promptGroups: groupsWithPublicFlag,
      pageNumber,
      pageSize,
      actualLimit,
      hasMore: has_more,
      after,
    });

    res.status(200).send(response);
  } catch (error) {
    logger.error(error);
    res.status(500).send({ error: 'Error getting prompt groups' });
@@ -109,16 +223,17 @@ router.get('/groups', async (req, res) => {
});

/**
 * Updates or creates a prompt + promptGroup
 * Creates a new prompt group with initial prompt
 * @param {object} req
 * @param {TCreatePrompt} req.body
 * @param {Express.Response} res
 */
const createPrompt = async (req, res) => {
const createNewPromptGroup = async (req, res) => {
  try {
    const { prompt, group } = req.body;
    if (!prompt) {
      return res.status(400).send({ error: 'Prompt is required' });

    if (!prompt || !group || !group.name) {
      return res.status(400).send({ error: 'Prompt and group name are required' });
    }

    const saveData = {
@@ -128,21 +243,80 @@ const createPrompt = async (req, res) => {
      authorName: req.user.name,
    };

    /** @type {TCreatePromptResponse} */
    let result;
    if (group && group.name) {
      result = await createPromptGroup(saveData);
    } else {
      result = await savePrompt(saveData);
    const result = await createPromptGroup(saveData);

    if (result.prompt && result.prompt._id && result.prompt.groupId) {
      try {
        await grantPermission({
          principalType: PrincipalType.USER,
          principalId: req.user.id,
          resourceType: ResourceType.PROMPTGROUP,
          resourceId: result.prompt.groupId,
          accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
          grantedBy: req.user.id,
        });
        logger.debug(
          `[createPromptGroup] Granted owner permissions to user ${req.user.id} for promptGroup ${result.prompt.groupId}`,
        );
      } catch (permissionError) {
        logger.error(
          `[createPromptGroup] Failed to grant owner permissions for promptGroup ${result.prompt.groupId}:`,
          permissionError,
        );
      }
    }

    res.status(200).send(result);
  } catch (error) {
    logger.error(error);
    res.status(500).send({ error: 'Error saving prompt' });
    res.status(500).send({ error: 'Error creating prompt group' });
  }
};

router.post('/', checkPromptCreate, createPrompt);
/**
 * Adds a new prompt to an existing prompt group
 * @param {object} req
 * @param {TCreatePrompt} req.body
 * @param {Express.Response} res
 */
const addPromptToGroup = async (req, res) => {
  try {
    const { groupId } = req.params;
    const { prompt } = req.body;

    if (!prompt) {
      return res.status(400).send({ error: 'Prompt is required' });
    }

    // Ensure the prompt is associated with the correct group
    prompt.groupId = groupId;

    const saveData = {
      prompt,
      author: req.user.id,
      authorName: req.user.name,
    };

    const result = await savePrompt(saveData);
    res.status(200).send(result);
  } catch (error) {
    logger.error(error);
    res.status(500).send({ error: 'Error adding prompt to group' });
  }
};

// Create new prompt group (requires CREATE permission)
router.post('/', checkPromptCreate, createNewPromptGroup);

// Add prompt to existing group (requires EDIT permission on the group)
router.post(
  '/groups/:groupId/prompts',
  checkPromptAccess,
  canAccessPromptGroupResource({
    requiredPermission: PermissionBits.EDIT,
  }),
  addPromptToGroup,
);

/**
 * Updates a prompt group
@@ -168,35 +342,74 @@ const patchPromptGroup = async (req, res) => {
  }
};

router.patch('/groups/:groupId', checkGlobalPromptShare, patchPromptGroup);
router.patch(
  '/groups/:groupId',
  checkGlobalPromptShare,
  canAccessPromptGroupResource({
    requiredPermission: PermissionBits.EDIT,
  }),
  patchPromptGroup,
);

router.patch('/:promptId/tags/production', checkPromptCreate, async (req, res) => {
  try {
router.patch(
  '/:promptId/tags/production',
  checkPromptCreate,
  canAccessPromptViaGroup({
    requiredPermission: PermissionBits.EDIT,
    resourceIdParam: 'promptId',
  }),
  async (req, res) => {
    try {
      const { promptId } = req.params;
      const result = await makePromptProduction(promptId);
      res.status(200).send(result);
    } catch (error) {
      logger.error(error);
      res.status(500).send({ error: 'Error updating prompt production' });
    }
  },
);

router.get(
  '/:promptId',
  canAccessPromptViaGroup({
    requiredPermission: PermissionBits.VIEW,
    resourceIdParam: 'promptId',
  }),
  async (req, res) => {
    const { promptId } = req.params;
    const result = await makePromptProduction(promptId);
    res.status(200).send(result);
  } catch (error) {
    logger.error(error);
    res.status(500).send({ error: 'Error updating prompt production' });
  }
});

router.get('/:promptId', async (req, res) => {
  const { promptId } = req.params;
  const author = req.user.id;
  const query = { _id: promptId, author };
  if (req.user.role === SystemRoles.ADMIN) {
    delete query.author;
  }
  const prompt = await getPrompt(query);
  res.status(200).send(prompt);
});
    const prompt = await getPrompt({ _id: promptId });
    res.status(200).send(prompt);
  },
);

router.get('/', async (req, res) => {
  try {
    const author = req.user.id;
    const { groupId } = req.query;
    const query = { groupId, author };

    // If requesting prompts for a specific group, check permissions
    if (groupId) {
      const permissions = await getEffectivePermissions({
        userId: req.user.id,
        role: req.user.role,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: groupId,
      });

      if (!(permissions & PermissionBits.VIEW)) {
        return res
          .status(403)
          .send({ error: 'Insufficient permissions to view prompts in this group' });
      }

      // If user has access, fetch all prompts in the group (not just their own)
      const prompts = await getPrompts({ groupId });
      return res.status(200).send(prompts);
    }

    // If no groupId, return user's own prompts
    const query = { author };
    if (req.user.role === SystemRoles.ADMIN) {
      delete query.author;
    }
@@ -240,7 +453,8 @@ const deletePromptController = async (req, res) => {
const deletePromptGroupController = async (req, res) => {
  try {
    const { groupId: _id } = req.params;
    const message = await deletePromptGroup({ _id, author: req.user.id, role: req.user.role });
    // Don't pass author - permissions are now checked by middleware
    const message = await deletePromptGroup({ _id, role: req.user.role });
    res.send(message);
  } catch (error) {
    logger.error('Error deleting prompt group', error);
@@ -248,7 +462,22 @@ const deletePromptGroupController = async (req, res) => {
  }
};

router.delete('/:promptId', checkPromptCreate, deletePromptController);
router.delete('/groups/:groupId', checkPromptCreate, deletePromptGroupController);
router.delete(
  '/:promptId',
  checkPromptCreate,
  canAccessPromptViaGroup({
    requiredPermission: PermissionBits.DELETE,
    resourceIdParam: 'promptId',
  }),
  deletePromptController,
);
router.delete(
  '/groups/:groupId',
  checkPromptCreate,
  canAccessPromptGroupResource({
    requiredPermission: PermissionBits.DELETE,
  }),
  deletePromptGroupController,
);

module.exports = router;

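The refactored routes above replace author-based queries with bitwise ACL checks such as `permissions & PermissionBits.VIEW`. A minimal sketch of how such bit flags compose and are tested; the numeric values here are illustrative assumptions, since the real constants live in librechat-data-provider:

// Hypothetical bit values, for illustration only.
const Bits = { VIEW: 1, EDIT: 2, DELETE: 4, SHARE: 8 };

// A role's permission mask is the OR of its granted bits
// (compare the permBits definitions in the test file below).
const editorMask = Bits.VIEW | Bits.EDIT; // 3

// A route guard then tests a single capability with AND.
console.log(Boolean(editorMask & Bits.VIEW)); // true: editors can view
console.log(Boolean(editorMask & Bits.DELETE)); // false: editors cannot delete
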
614 api/server/routes/prompts.test.js Normal file
@@ -0,0 +1,614 @@
const express = require('express');
const request = require('supertest');
const mongoose = require('mongoose');
const { ObjectId } = require('mongodb');
const { MongoMemoryServer } = require('mongodb-memory-server');
const {
  SystemRoles,
  ResourceType,
  AccessRoleIds,
  PrincipalType,
  PermissionBits,
} = require('librechat-data-provider');

// Mock modules before importing
jest.mock('~/server/services/Config', () => ({
  getCachedTools: jest.fn().mockResolvedValue({}),
  getCustomConfig: jest.fn(),
}));

jest.mock('~/models/Role', () => ({
  getRoleByName: jest.fn(),
}));

jest.mock('~/server/middleware', () => ({
  requireJwtAuth: (req, res, next) => next(),
  canAccessPromptViaGroup: jest.requireActual('~/server/middleware').canAccessPromptViaGroup,
  canAccessPromptGroupResource:
    jest.requireActual('~/server/middleware').canAccessPromptGroupResource,
}));

let app;
let mongoServer;
let promptRoutes;
let Prompt, PromptGroup, AclEntry, AccessRole, User;
let testUsers, testRoles;
let grantPermission;

// Helper function to set user in middleware
function setTestUser(app, user) {
  app.use((req, res, next) => {
    req.user = {
      ...(user.toObject ? user.toObject() : user),
      id: user.id || user._id.toString(),
      _id: user._id,
      name: user.name,
      role: user.role,
    };
    if (user.role === SystemRoles.ADMIN) {
      console.log('Setting admin user with role:', req.user.role);
    }
    next();
  });
}

beforeAll(async () => {
  mongoServer = await MongoMemoryServer.create();
  const mongoUri = mongoServer.getUri();
  await mongoose.connect(mongoUri);

  // Initialize models
  const dbModels = require('~/db/models');
  Prompt = dbModels.Prompt;
  PromptGroup = dbModels.PromptGroup;
  AclEntry = dbModels.AclEntry;
  AccessRole = dbModels.AccessRole;
  User = dbModels.User;

  // Import permission service
  const permissionService = require('~/server/services/PermissionService');
  grantPermission = permissionService.grantPermission;

  // Create test data
  await setupTestData();

  // Setup Express app
  app = express();
  app.use(express.json());

  // Mock authentication middleware - default to owner
  setTestUser(app, testUsers.owner);

  // Import routes after mocks are set up
  promptRoutes = require('./prompts');
  app.use('/api/prompts', promptRoutes);
});

afterAll(async () => {
  await mongoose.disconnect();
  await mongoServer.stop();
  jest.clearAllMocks();
});

async function setupTestData() {
  // Create access roles for promptGroups
  testRoles = {
    viewer: await AccessRole.create({
      accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
      name: 'Viewer',
      resourceType: ResourceType.PROMPTGROUP,
      permBits: PermissionBits.VIEW,
    }),
    editor: await AccessRole.create({
      accessRoleId: AccessRoleIds.PROMPTGROUP_EDITOR,
      name: 'Editor',
      resourceType: ResourceType.PROMPTGROUP,
      permBits: PermissionBits.VIEW | PermissionBits.EDIT,
    }),
    owner: await AccessRole.create({
      accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
      name: 'Owner',
      resourceType: ResourceType.PROMPTGROUP,
      permBits:
        PermissionBits.VIEW | PermissionBits.EDIT | PermissionBits.DELETE | PermissionBits.SHARE,
    }),
  };

  // Create test users
  testUsers = {
    owner: await User.create({
      id: new ObjectId().toString(),
      _id: new ObjectId(),
      name: 'Prompt Owner',
      email: 'owner@example.com',
      role: SystemRoles.USER,
    }),
    viewer: await User.create({
      id: new ObjectId().toString(),
      _id: new ObjectId(),
      name: 'Prompt Viewer',
      email: 'viewer@example.com',
      role: SystemRoles.USER,
    }),
    editor: await User.create({
      id: new ObjectId().toString(),
      _id: new ObjectId(),
      name: 'Prompt Editor',
      email: 'editor@example.com',
      role: SystemRoles.USER,
    }),
    noAccess: await User.create({
      id: new ObjectId().toString(),
      _id: new ObjectId(),
      name: 'No Access',
      email: 'noaccess@example.com',
      role: SystemRoles.USER,
    }),
    admin: await User.create({
      id: new ObjectId().toString(),
      _id: new ObjectId(),
      name: 'Admin',
      email: 'admin@example.com',
      role: SystemRoles.ADMIN,
    }),
  };

  // Mock getRoleByName
  const { getRoleByName } = require('~/models/Role');
  getRoleByName.mockImplementation((roleName) => {
    switch (roleName) {
      case SystemRoles.USER:
        return { permissions: { PROMPTS: { USE: true, CREATE: true } } };
      case SystemRoles.ADMIN:
        return { permissions: { PROMPTS: { USE: true, CREATE: true, SHARED_GLOBAL: true } } };
      default:
        return null;
    }
  });
}

describe('Prompt Routes - ACL Permissions', () => {
  let consoleErrorSpy;

  beforeEach(() => {
    consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation();
  });

  afterEach(() => {
    consoleErrorSpy.mockRestore();
  });

  // Simple test to verify route is loaded
  it('should have routes loaded', async () => {
    // This should at least not crash
    const response = await request(app).get('/api/prompts/test-404');
    console.log('Test 404 response status:', response.status);
    console.log('Test 404 response body:', response.body);
    // We expect a 401 or 404, not 500
    expect(response.status).not.toBe(500);
  });

  describe('POST /api/prompts - Create Prompt', () => {
    afterEach(async () => {
      await Prompt.deleteMany({});
      await PromptGroup.deleteMany({});
      await AclEntry.deleteMany({});
    });

    it('should create a prompt and grant owner permissions', async () => {
      const promptData = {
        prompt: {
          prompt: 'Test prompt content',
          type: 'text',
        },
        group: {
          name: 'Test Prompt Group',
        },
      };

      const response = await request(app).post('/api/prompts').send(promptData);

      if (response.status !== 200) {
        console.log('POST /api/prompts error status:', response.status);
        console.log('POST /api/prompts error body:', response.body);
        console.log('Console errors:', consoleErrorSpy.mock.calls);
      }

      expect(response.status).toBe(200);
      expect(response.body.prompt).toBeDefined();
      expect(response.body.prompt.prompt).toBe(promptData.prompt.prompt);

      // Check ACL entry was created
      const aclEntry = await AclEntry.findOne({
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: response.body.prompt.groupId,
        principalType: PrincipalType.USER,
        principalId: testUsers.owner._id,
      });

      expect(aclEntry).toBeTruthy();
      expect(aclEntry.roleId.toString()).toBe(testRoles.owner._id.toString());
    });

    it('should create a prompt group with prompt and grant owner permissions', async () => {
      const promptData = {
        prompt: {
          prompt: 'Group prompt content',
          // Remove 'name' from prompt - it's not in the schema
        },
        group: {
          name: 'Test Group',
          category: 'testing',
        },
      };

      const response = await request(app).post('/api/prompts').send(promptData).expect(200);

      expect(response.body.prompt).toBeDefined();
      expect(response.body.group).toBeDefined();
      expect(response.body.group.name).toBe(promptData.group.name);

      // Check ACL entry was created for the promptGroup
      const aclEntry = await AclEntry.findOne({
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: response.body.group._id,
        principalType: PrincipalType.USER,
        principalId: testUsers.owner._id,
      });

      expect(aclEntry).toBeTruthy();
    });
  });

  describe('GET /api/prompts/:promptId - Get Prompt', () => {
    let testPrompt;
    let testGroup;

    beforeEach(async () => {
      // Create a prompt group first
      testGroup = await PromptGroup.create({
        name: 'Test Group',
        category: 'testing',
        author: testUsers.owner._id,
        authorName: testUsers.owner.name,
        productionId: new ObjectId(),
      });

      // Create a prompt
      testPrompt = await Prompt.create({
        prompt: 'Test prompt for retrieval',
        name: 'Get Test',
        author: testUsers.owner._id,
        type: 'text',
        groupId: testGroup._id,
      });
    });

    afterEach(async () => {
      await Prompt.deleteMany({});
      await PromptGroup.deleteMany({});
      await AclEntry.deleteMany({});
    });

    it('should retrieve prompt when user has view permissions', async () => {
      // Grant view permissions on the promptGroup
      await grantPermission({
        principalType: PrincipalType.USER,
        principalId: testUsers.owner._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testGroup._id,
        accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
        grantedBy: testUsers.owner._id,
      });

      const response = await request(app).get(`/api/prompts/${testPrompt._id}`);
      expect(response.status).toBe(200);
      expect(response.body._id).toBe(testPrompt._id.toString());
      expect(response.body.prompt).toBe(testPrompt.prompt);
    });

    it('should deny access when user has no permissions', async () => {
      // Change the user to one without access
      setTestUser(app, testUsers.noAccess);

      const response = await request(app).get(`/api/prompts/${testPrompt._id}`).expect(403);

      // Verify error response
      expect(response.body.error).toBe('Forbidden');
      expect(response.body.message).toBe('Insufficient permissions to access this promptGroup');
    });

    it('should allow admin access without explicit permissions', async () => {
      // First, reset the app to remove previous middleware
      app = express();
      app.use(express.json());

      // Set admin user BEFORE adding routes
      app.use((req, res, next) => {
        req.user = {
          ...testUsers.admin.toObject(),
          id: testUsers.admin._id.toString(),
          _id: testUsers.admin._id,
          name: testUsers.admin.name,
          role: testUsers.admin.role,
        };
        next();
      });

      // Now add the routes
      const promptRoutes = require('./prompts');
      app.use('/api/prompts', promptRoutes);

      console.log('Admin user:', testUsers.admin);
      console.log('Admin role:', testUsers.admin.role);
      console.log('SystemRoles.ADMIN:', SystemRoles.ADMIN);

      const response = await request(app).get(`/api/prompts/${testPrompt._id}`).expect(200);

      expect(response.body._id).toBe(testPrompt._id.toString());
    });
  });

  describe('DELETE /api/prompts/:promptId - Delete Prompt', () => {
    let testPrompt;
    let testGroup;

    beforeEach(async () => {
      // Create group with prompt
      testGroup = await PromptGroup.create({
        name: 'Delete Test Group',
        category: 'testing',
        author: testUsers.owner._id,
        authorName: testUsers.owner.name,
        productionId: new ObjectId(),
      });

      testPrompt = await Prompt.create({
        prompt: 'Test prompt for deletion',
        name: 'Delete Test',
        author: testUsers.owner._id,
        type: 'text',
        groupId: testGroup._id,
      });

      // Add prompt to group
      testGroup.productionId = testPrompt._id;
      testGroup.promptIds = [testPrompt._id];
      await testGroup.save();

      // Grant owner permissions on the promptGroup
      await grantPermission({
        principalType: PrincipalType.USER,
        principalId: testUsers.owner._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testGroup._id,
        accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
        grantedBy: testUsers.owner._id,
      });
    });

    afterEach(async () => {
      await Prompt.deleteMany({});
      await PromptGroup.deleteMany({});
      await AclEntry.deleteMany({});
    });

    it('should delete prompt when user has delete permissions', async () => {
      const response = await request(app)
        .delete(`/api/prompts/${testPrompt._id}`)
        .query({ groupId: testGroup._id.toString() })
        .expect(200);

      expect(response.body.prompt).toBe('Prompt deleted successfully');

      // Verify prompt was deleted
      const deletedPrompt = await Prompt.findById(testPrompt._id);
      expect(deletedPrompt).toBeNull();

      // Verify ACL entries were removed
      const aclEntries = await AclEntry.find({
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testGroup._id,
      });
      expect(aclEntries).toHaveLength(0);
    });

    it('should deny deletion when user lacks delete permissions', async () => {
      // Create a prompt as a different user (not the one trying to delete)
      const authorPrompt = await Prompt.create({
        prompt: 'Test prompt by another user',
        name: 'Another User Prompt',
        author: testUsers.editor._id, // Different author
        type: 'text',
        groupId: testGroup._id,
      });

      // Grant only viewer permissions to viewer user on the promptGroup
      await grantPermission({
        principalType: PrincipalType.USER,
        principalId: testUsers.viewer._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testGroup._id,
        accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
        grantedBy: testUsers.editor._id,
      });

      // Recreate app with viewer user
      app = express();
      app.use(express.json());
      app.use((req, res, next) => {
        req.user = {
          ...testUsers.viewer.toObject(),
          id: testUsers.viewer._id.toString(),
          _id: testUsers.viewer._id,
          name: testUsers.viewer.name,
          role: testUsers.viewer.role,
        };
        next();
      });
      const promptRoutes = require('./prompts');
      app.use('/api/prompts', promptRoutes);

      await request(app)
        .delete(`/api/prompts/${authorPrompt._id}`)
        .query({ groupId: testGroup._id.toString() })
        .expect(403);

      // Verify prompt still exists
      const prompt = await Prompt.findById(authorPrompt._id);
      expect(prompt).toBeTruthy();
    });
  });

  describe('PATCH /api/prompts/:promptId/tags/production - Make Production', () => {
    let testPrompt;
    let testGroup;

    beforeEach(async () => {
      // Create group
      testGroup = await PromptGroup.create({
        name: 'Production Test Group',
        category: 'testing',
        author: testUsers.owner._id,
        authorName: testUsers.owner.name,
        productionId: new ObjectId(),
      });

      testPrompt = await Prompt.create({
        prompt: 'Test prompt for production',
        name: 'Production Test',
        author: testUsers.owner._id,
        type: 'text',
        groupId: testGroup._id,
      });
    });

    afterEach(async () => {
      await Prompt.deleteMany({});
      await PromptGroup.deleteMany({});
      await AclEntry.deleteMany({});
    });

    it('should make prompt production when user has edit permissions', async () => {
      // Grant edit permissions on the promptGroup
      await grantPermission({
        principalType: PrincipalType.USER,
        principalId: testUsers.owner._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testGroup._id,
        accessRoleId: AccessRoleIds.PROMPTGROUP_EDITOR,
        grantedBy: testUsers.owner._id,
      });

      // Recreate app to ensure fresh middleware
      app = express();
      app.use(express.json());
      app.use((req, res, next) => {
        req.user = {
          ...testUsers.owner.toObject(),
          id: testUsers.owner._id.toString(),
          _id: testUsers.owner._id,
          name: testUsers.owner.name,
          role: testUsers.owner.role,
        };
        next();
      });
      const promptRoutes = require('./prompts');
      app.use('/api/prompts', promptRoutes);

      const response = await request(app)
        .patch(`/api/prompts/${testPrompt._id}/tags/production`)
        .expect(200);

      expect(response.body.message).toBe('Prompt production made successfully');

      // Verify the group was updated
      const updatedGroup = await PromptGroup.findById(testGroup._id);
      expect(updatedGroup.productionId.toString()).toBe(testPrompt._id.toString());
    });

    it('should deny making production when user lacks edit permissions', async () => {
      // Grant only view permissions to viewer on the promptGroup
      await grantPermission({
        principalType: PrincipalType.USER,
        principalId: testUsers.viewer._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testGroup._id,
        accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
        grantedBy: testUsers.owner._id,
      });

      // Recreate app with viewer user
      app = express();
      app.use(express.json());
      app.use((req, res, next) => {
        req.user = {
          ...testUsers.viewer.toObject(),
          id: testUsers.viewer._id.toString(),
          _id: testUsers.viewer._id,
          name: testUsers.viewer.name,
          role: testUsers.viewer.role,
        };
        next();
      });
      const promptRoutes = require('./prompts');
      app.use('/api/prompts', promptRoutes);

      await request(app).patch(`/api/prompts/${testPrompt._id}/tags/production`).expect(403);

      // Verify prompt hasn't changed
      const unchangedGroup = await PromptGroup.findById(testGroup._id);
      expect(unchangedGroup.productionId.toString()).not.toBe(testPrompt._id.toString());
    });
  });

  describe('Public Access', () => {
    let publicPrompt;
    let publicGroup;

    beforeEach(async () => {
      // Create a prompt group
      publicGroup = await PromptGroup.create({
        name: 'Public Test Group',
        category: 'testing',
        author: testUsers.owner._id,
        authorName: testUsers.owner.name,
        productionId: new ObjectId(),
      });

      // Create a public prompt
      publicPrompt = await Prompt.create({
        prompt: 'Public prompt content',
        name: 'Public Test',
        author: testUsers.owner._id,
        type: 'text',
        groupId: publicGroup._id,
      });

      // Grant public viewer access on the promptGroup
      await grantPermission({
        principalType: PrincipalType.PUBLIC,
        principalId: null,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: publicGroup._id,
        accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
        grantedBy: testUsers.owner._id,
      });
    });

    afterEach(async () => {
      await Prompt.deleteMany({});
      await PromptGroup.deleteMany({});
      await AclEntry.deleteMany({});
    });

    it('should allow any user to view public prompts', async () => {
      // Change user to someone without explicit permissions
      setTestUser(app, testUsers.noAccess);

      const response = await request(app).get(`/api/prompts/${publicPrompt._id}`).expect(200);

      expect(response.body._id).toBe(publicPrompt._id.toString());
    });
  });
});

@@ -1,11 +1,13 @@
const express = require('express');
const {
  SystemRoles,
  roleDefaults,
  PermissionTypes,
  agentPermissionsSchema,
  promptPermissionsSchema,
  memoryPermissionsSchema,
  agentPermissionsSchema,
  PermissionTypes,
  roleDefaults,
  SystemRoles,
  marketplacePermissionsSchema,
  peoplePickerPermissionsSchema,
} = require('librechat-data-provider');
const { checkAdmin, requireJwtAuth } = require('~/server/middleware');
const { updateRoleByName, getRoleByName } = require('~/models/Role');
@@ -13,6 +15,81 @@ const { updateRoleByName, getRoleByName } = require('~/models/Role');
const router = express.Router();
router.use(requireJwtAuth);

/**
 * Permission configuration mapping
 * Maps route paths to their corresponding schemas and permission types
 */
const permissionConfigs = {
  prompts: {
    schema: promptPermissionsSchema,
    permissionType: PermissionTypes.PROMPTS,
    errorMessage: 'Invalid prompt permissions.',
  },
  agents: {
    schema: agentPermissionsSchema,
    permissionType: PermissionTypes.AGENTS,
    errorMessage: 'Invalid agent permissions.',
  },
  memories: {
    schema: memoryPermissionsSchema,
    permissionType: PermissionTypes.MEMORIES,
    errorMessage: 'Invalid memory permissions.',
  },
  'people-picker': {
    schema: peoplePickerPermissionsSchema,
    permissionType: PermissionTypes.PEOPLE_PICKER,
    errorMessage: 'Invalid people picker permissions.',
  },
  marketplace: {
    schema: marketplacePermissionsSchema,
    permissionType: PermissionTypes.MARKETPLACE,
    errorMessage: 'Invalid marketplace permissions.',
  },
};

/**
 * Generic handler for updating permissions
 * @param {string} permissionKey - The key from permissionConfigs
 * @returns {Function} Express route handler
 */
const createPermissionUpdateHandler = (permissionKey) => {
  const config = permissionConfigs[permissionKey];

  return async (req, res) => {
    const { roleName: _r } = req.params;
    // TODO: TEMP, use a better parsing for roleName
    const roleName = _r.toUpperCase();
    const updates = req.body;

    try {
      const parsedUpdates = config.schema.partial().parse(updates);

      const role = await getRoleByName(roleName);
      if (!role) {
        return res.status(404).send({ message: 'Role not found' });
      }

      const currentPermissions =
        role.permissions?.[config.permissionType] || role[config.permissionType] || {};

      const mergedUpdates = {
        permissions: {
          ...role.permissions,
          [config.permissionType]: {
            ...currentPermissions,
            ...parsedUpdates,
          },
        },
      };

      const updatedRole = await updateRoleByName(roleName, mergedUpdates);
      res.status(200).send(updatedRole);
    } catch (error) {
      return res.status(400).send({ message: config.errorMessage, error: error.errors });
    }
  };
};

/**
 * GET /api/roles/:roleName
 * Get a specific role by name
@@ -45,117 +122,30 @@ router.get('/:roleName', async (req, res) => {
 * PUT /api/roles/:roleName/prompts
 * Update prompt permissions for a specific role
 */
router.put('/:roleName/prompts', checkAdmin, async (req, res) => {
  const { roleName: _r } = req.params;
  // TODO: TEMP, use a better parsing for roleName
  const roleName = _r.toUpperCase();
  /** @type {TRole['permissions']['PROMPTS']} */
  const updates = req.body;

  try {
    const parsedUpdates = promptPermissionsSchema.partial().parse(updates);

    const role = await getRoleByName(roleName);
    if (!role) {
      return res.status(404).send({ message: 'Role not found' });
    }

    const currentPermissions =
      role.permissions?.[PermissionTypes.PROMPTS] || role[PermissionTypes.PROMPTS] || {};

    const mergedUpdates = {
      permissions: {
        ...role.permissions,
        [PermissionTypes.PROMPTS]: {
          ...currentPermissions,
          ...parsedUpdates,
        },
      },
    };

    const updatedRole = await updateRoleByName(roleName, mergedUpdates);
    res.status(200).send(updatedRole);
  } catch (error) {
    return res.status(400).send({ message: 'Invalid prompt permissions.', error: error.errors });
  }
});
router.put('/:roleName/prompts', checkAdmin, createPermissionUpdateHandler('prompts'));

/**
 * PUT /api/roles/:roleName/agents
 * Update agent permissions for a specific role
 */
router.put('/:roleName/agents', checkAdmin, async (req, res) => {
  const { roleName: _r } = req.params;
  // TODO: TEMP, use a better parsing for roleName
  const roleName = _r.toUpperCase();
  /** @type {TRole['permissions']['AGENTS']} */
  const updates = req.body;

  try {
    const parsedUpdates = agentPermissionsSchema.partial().parse(updates);

    const role = await getRoleByName(roleName);
    if (!role) {
      return res.status(404).send({ message: 'Role not found' });
    }

    const currentPermissions =
      role.permissions?.[PermissionTypes.AGENTS] || role[PermissionTypes.AGENTS] || {};

    const mergedUpdates = {
      permissions: {
        ...role.permissions,
        [PermissionTypes.AGENTS]: {
          ...currentPermissions,
          ...parsedUpdates,
        },
      },
    };

    const updatedRole = await updateRoleByName(roleName, mergedUpdates);
    res.status(200).send(updatedRole);
  } catch (error) {
    return res.status(400).send({ message: 'Invalid agent permissions.', error: error.errors });
  }
});
router.put('/:roleName/agents', checkAdmin, createPermissionUpdateHandler('agents'));

/**
 * PUT /api/roles/:roleName/memories
 * Update memory permissions for a specific role
 */
router.put('/:roleName/memories', checkAdmin, async (req, res) => {
  const { roleName: _r } = req.params;
  // TODO: TEMP, use a better parsing for roleName
  const roleName = _r.toUpperCase();
  /** @type {TRole['permissions']['MEMORIES']} */
  const updates = req.body;
router.put('/:roleName/memories', checkAdmin, createPermissionUpdateHandler('memories'));

  try {
    const parsedUpdates = memoryPermissionsSchema.partial().parse(updates);
/**
 * PUT /api/roles/:roleName/people-picker
 * Update people picker permissions for a specific role
 */
router.put('/:roleName/people-picker', checkAdmin, createPermissionUpdateHandler('people-picker'));

    const role = await getRoleByName(roleName);
    if (!role) {
      return res.status(404).send({ message: 'Role not found' });
    }

    const currentPermissions =
      role.permissions?.[PermissionTypes.MEMORIES] || role[PermissionTypes.MEMORIES] || {};

    const mergedUpdates = {
      permissions: {
        ...role.permissions,
        [PermissionTypes.MEMORIES]: {
          ...currentPermissions,
          ...parsedUpdates,
        },
      },
    };

    const updatedRole = await updateRoleByName(roleName, mergedUpdates);
    res.status(200).send(updatedRole);
  } catch (error) {
    return res.status(400).send({ message: 'Invalid memory permissions.', error: error.errors });
  }
});
/**
 * PUT /api/roles/:roleName/marketplace
 * Update marketplace permissions for a specific role
 */
router.put('/:roleName/marketplace', checkAdmin, createPermissionUpdateHandler('marketplace'));

module.exports = router;

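With the generic handler in place, every permission family is updated through the same PUT shape, and the schema's .partial() accepts a subset of keys. A sketch of an admin client call; the /api/roles mount path and the Authorization header are assumptions for the example, and adminJwt is a placeholder:

// Sketch: partially update prompt permissions for the USER role.
// Only the provided keys are merged into the existing permissions.
await fetch('/api/roles/user/prompts', {
  method: 'PUT',
  headers: {
    'Content-Type': 'application/json',
    Authorization: `Bearer ${adminJwt}`, // adminJwt is a hypothetical admin token
  },
  body: JSON.stringify({ SHARED_GLOBAL: true }),
});
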
@@ -5,7 +5,7 @@ jest.mock('~/models', () => ({
}));
jest.mock('~/models/Role', () => ({
  updateAccessPermissions: jest.fn(),
  getRoleByName: jest.fn(),
  getRoleByName: jest.fn().mockResolvedValue(null),
  updateRoleByName: jest.fn(),
}));

@@ -89,4 +89,76 @@ describe('AppService interface configuration', () => {
    expect(app.locals.interfaceConfig.bookmarks).toBe(false);
    expect(loadDefaultInterface).toHaveBeenCalled();
  });

  it('should correctly configure peoplePicker permissions including roles', async () => {
    mockLoadCustomConfig.mockResolvedValue({
      interface: {
        peoplePicker: {
          users: true,
          groups: true,
          roles: true,
        },
      },
    });
    loadDefaultInterface.mockResolvedValue({
      peoplePicker: {
        users: true,
        groups: true,
        roles: true,
      },
    });

    await AppService(app);

    expect(app.locals.interfaceConfig.peoplePicker).toBeDefined();
    expect(app.locals.interfaceConfig.peoplePicker).toMatchObject({
      users: true,
      groups: true,
      roles: true,
    });
    expect(loadDefaultInterface).toHaveBeenCalled();
  });

  it('should handle mixed peoplePicker permissions', async () => {
    mockLoadCustomConfig.mockResolvedValue({
      interface: {
        peoplePicker: {
          users: true,
          groups: false,
          roles: true,
        },
      },
    });
    loadDefaultInterface.mockResolvedValue({
      peoplePicker: {
        users: true,
        groups: false,
        roles: true,
      },
    });

    await AppService(app);

    expect(app.locals.interfaceConfig.peoplePicker.users).toBe(true);
    expect(app.locals.interfaceConfig.peoplePicker.groups).toBe(false);
    expect(app.locals.interfaceConfig.peoplePicker.roles).toBe(true);
  });

  it('should set default peoplePicker permissions when not provided', async () => {
    mockLoadCustomConfig.mockResolvedValue({});
    loadDefaultInterface.mockResolvedValue({
      peoplePicker: {
        users: true,
        groups: true,
        roles: true,
      },
    });

    await AppService(app);

    expect(app.locals.interfaceConfig.peoplePicker).toBeDefined();
    expect(app.locals.interfaceConfig.peoplePicker.users).toBe(true);
    expect(app.locals.interfaceConfig.peoplePicker.groups).toBe(true);
    expect(app.locals.interfaceConfig.peoplePicker.roles).toBe(true);
  });
});

@@ -1,9 +1,8 @@
const { agentsConfigSetup, loadWebSearchConfig } = require('@librechat/api');
const { loadMemoryConfig, agentsConfigSetup, loadWebSearchConfig } = require('@librechat/api');
const {
  FileSources,
  loadOCRConfig,
  EModelEndpoint,
  loadMemoryConfig,
  getConfigDefaults,
} = require('librechat-data-provider');
const {
@@ -87,6 +86,7 @@ const AppService = async (app) => {
  const turnstileConfig = loadTurnstileConfig(config, configDefaults);

  const defaultLocals = {
    config,
    ocr,
    paths,
    memory,
@@ -159,6 +159,10 @@ const AppService = async (app) => {
    }
  });

  if (endpoints?.all) {
    endpointLocals.all = endpoints.all;
  }

  app.locals = {
    ...defaultLocals,
    fileConfig: config?.fileConfig,

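The new `endpoints?.all` branch copies a shared endpoint config onto app.locals.all, which the test at the end of this diff exercises. A sketch of the shape it consumes, mirroring the mocked config in that test; how other endpoints fall back to it is not shown here and is an assumption:

// Shape consumed by the `endpoints?.all` branch (field values taken from the test below).
const config = {
  endpoints: {
    all: {
      titleConvo: true,
      titleModel: 'gpt-4o-mini',
      titleMethod: 'structured',
      titleEndpoint: 'anthropic', // EModelEndpoint.anthropic in the test
      streamRate: 50,
    },
  },
};
// AppService exposes this as app.locals.all, presumably as a shared default for all endpoints.
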
@@ -33,6 +33,7 @@ jest.mock('~/models', () => ({
|
||||
}));
|
||||
jest.mock('~/models/Role', () => ({
|
||||
updateAccessPermissions: jest.fn(),
|
||||
getRoleByName: jest.fn().mockResolvedValue(null),
|
||||
}));
|
||||
jest.mock('./Config', () => ({
|
||||
setCachedTools: jest.fn(),
|
||||
@@ -133,6 +134,9 @@ describe('AppService', () => {
|
||||
expect(process.env.CDN_PROVIDER).toEqual('testStrategy');
|
||||
|
||||
expect(app.locals).toEqual({
|
||||
config: expect.objectContaining({
|
||||
fileStrategy: 'testStrategy',
|
||||
}),
|
||||
socialLogins: ['testLogin'],
|
||||
fileStrategy: 'testStrategy',
|
||||
interfaceConfig: expect.objectContaining({
|
||||
@@ -167,6 +171,9 @@ describe('AppService', () => {
|
||||
agents: {
|
||||
disableBuilder: false,
|
||||
capabilities: expect.arrayContaining([...defaultAgentCapabilities]),
|
||||
maxCitations: 30,
|
||||
maxCitationsPerFile: 7,
|
||||
minRelevanceScore: 0.45,
|
||||
},
|
||||
});
|
||||
});
|
||||
@@ -545,6 +552,206 @@ describe('AppService', () => {
|
||||
expect(process.env.IMPORT_USER_MAX).toEqual('initialUserMax');
|
||||
expect(process.env.IMPORT_USER_WINDOW).toEqual('initialUserWindow');
|
||||
});
|
||||
|
||||
it('should correctly configure endpoint with titlePrompt, titleMethod, and titlePromptTemplate', async () => {
|
||||
require('./Config/loadCustomConfig').mockImplementationOnce(() =>
|
||||
Promise.resolve({
|
||||
endpoints: {
|
||||
[EModelEndpoint.openAI]: {
|
||||
titleConvo: true,
|
||||
titleModel: 'gpt-3.5-turbo',
|
||||
titleMethod: 'structured',
|
||||
titlePrompt: 'Custom title prompt for conversation',
|
||||
titlePromptTemplate: 'Summarize this conversation: {{conversation}}',
|
||||
},
|
||||
[EModelEndpoint.assistants]: {
|
||||
titleMethod: 'functions',
|
||||
titlePrompt: 'Generate a title for this assistant conversation',
|
||||
titlePromptTemplate: 'Assistant conversation template: {{messages}}',
|
||||
},
|
||||
[EModelEndpoint.azureOpenAI]: {
|
||||
groups: azureGroups,
|
||||
titleConvo: true,
|
||||
titleMethod: 'completion',
|
||||
titleModel: 'gpt-4',
|
||||
titlePrompt: 'Azure title prompt',
|
||||
titlePromptTemplate: 'Azure conversation: {{context}}',
|
||||
},
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
await AppService(app);
|
||||
|
||||
// Check OpenAI endpoint configuration
|
||||
expect(app.locals).toHaveProperty(EModelEndpoint.openAI);
|
||||
expect(app.locals[EModelEndpoint.openAI]).toEqual(
|
||||
expect.objectContaining({
|
||||
titleConvo: true,
|
||||
titleModel: 'gpt-3.5-turbo',
|
||||
titleMethod: 'structured',
|
||||
titlePrompt: 'Custom title prompt for conversation',
|
||||
titlePromptTemplate: 'Summarize this conversation: {{conversation}}',
|
||||
}),
|
||||
);
|
||||
|
||||
// Check Assistants endpoint configuration
|
||||
expect(app.locals).toHaveProperty(EModelEndpoint.assistants);
|
||||
expect(app.locals[EModelEndpoint.assistants]).toMatchObject({
|
||||
titleMethod: 'functions',
|
||||
titlePrompt: 'Generate a title for this assistant conversation',
|
||||
titlePromptTemplate: 'Assistant conversation template: {{messages}}',
|
||||
});
|
||||
|
||||
// Check Azure OpenAI endpoint configuration
|
||||
expect(app.locals).toHaveProperty(EModelEndpoint.azureOpenAI);
|
||||
expect(app.locals[EModelEndpoint.azureOpenAI]).toEqual(
|
||||
expect.objectContaining({
|
||||
titleConvo: true,
|
||||
titleMethod: 'completion',
|
||||
titleModel: 'gpt-4',
|
||||
titlePrompt: 'Azure title prompt',
|
||||
titlePromptTemplate: 'Azure conversation: {{context}}',
|
||||
}),
|
||||
);
|
||||
});
|
||||
+
+  it('should configure Agent endpoint with title generation settings', async () => {
+    require('./Config/loadCustomConfig').mockImplementationOnce(() =>
+      Promise.resolve({
+        endpoints: {
+          [EModelEndpoint.agents]: {
+            disableBuilder: false,
+            titleConvo: true,
+            titleModel: 'gpt-4',
+            titleMethod: 'structured',
+            titlePrompt: 'Generate a descriptive title for this agent conversation',
+            titlePromptTemplate: 'Agent conversation summary: {{content}}',
+            recursionLimit: 15,
+            capabilities: [AgentCapabilities.tools, AgentCapabilities.actions],
+          },
+        },
+      }),
+    );
+
+    await AppService(app);
+
+    expect(app.locals).toHaveProperty(EModelEndpoint.agents);
+    expect(app.locals[EModelEndpoint.agents]).toMatchObject({
+      disableBuilder: false,
+      titleConvo: true,
+      titleModel: 'gpt-4',
+      titleMethod: 'structured',
+      titlePrompt: 'Generate a descriptive title for this agent conversation',
+      titlePromptTemplate: 'Agent conversation summary: {{content}}',
+      recursionLimit: 15,
+      capabilities: expect.arrayContaining([AgentCapabilities.tools, AgentCapabilities.actions]),
+    });
+  });
+
+  it('should handle missing title configuration options with defaults', async () => {
+    require('./Config/loadCustomConfig').mockImplementationOnce(() =>
+      Promise.resolve({
+        endpoints: {
+          [EModelEndpoint.openAI]: {
+            titleConvo: true,
+            // titlePrompt and titlePromptTemplate are not provided
+          },
+        },
+      }),
+    );
+
+    await AppService(app);
+
+    expect(app.locals).toHaveProperty(EModelEndpoint.openAI);
+    expect(app.locals[EModelEndpoint.openAI]).toMatchObject({
+      titleConvo: true,
+    });
+    // Check that the optional fields are undefined when not provided
+    expect(app.locals[EModelEndpoint.openAI].titlePrompt).toBeUndefined();
+    expect(app.locals[EModelEndpoint.openAI].titlePromptTemplate).toBeUndefined();
+    expect(app.locals[EModelEndpoint.openAI].titleMethod).toBeUndefined();
+  });
+
+  it('should correctly configure titleEndpoint when specified', async () => {
+    require('./Config/loadCustomConfig').mockImplementationOnce(() =>
+      Promise.resolve({
+        endpoints: {
+          [EModelEndpoint.openAI]: {
+            titleConvo: true,
+            titleModel: 'gpt-3.5-turbo',
+            titleEndpoint: EModelEndpoint.anthropic,
+            titlePrompt: 'Generate a concise title',
+          },
+          [EModelEndpoint.agents]: {
+            titleEndpoint: 'custom-provider',
+            titleMethod: 'structured',
+          },
+        },
+      }),
+    );
+
+    await AppService(app);
+
+    // Check OpenAI endpoint has titleEndpoint
+    expect(app.locals).toHaveProperty(EModelEndpoint.openAI);
+    expect(app.locals[EModelEndpoint.openAI]).toMatchObject({
+      titleConvo: true,
+      titleModel: 'gpt-3.5-turbo',
+      titleEndpoint: EModelEndpoint.anthropic,
+      titlePrompt: 'Generate a concise title',
+    });
+
+    // Check Agents endpoint has titleEndpoint
+    expect(app.locals).toHaveProperty(EModelEndpoint.agents);
+    expect(app.locals[EModelEndpoint.agents]).toMatchObject({
+      titleEndpoint: 'custom-provider',
+      titleMethod: 'structured',
+    });
+  });
+
+  it('should correctly configure all endpoint when specified', async () => {
+    require('./Config/loadCustomConfig').mockImplementationOnce(() =>
+      Promise.resolve({
+        endpoints: {
+          all: {
+            titleConvo: true,
+            titleModel: 'gpt-4o-mini',
+            titleMethod: 'structured',
+            titlePrompt: 'Default title prompt for all endpoints',
+            titlePromptTemplate: 'Default template: {{conversation}}',
+            titleEndpoint: EModelEndpoint.anthropic,
+            streamRate: 50,
+          },
+          [EModelEndpoint.openAI]: {
+            titleConvo: true,
+            titleModel: 'gpt-3.5-turbo',
+          },
+        },
+      }),
+    );
+
+    await AppService(app);
+
+    // Check that 'all' endpoint config is loaded
+    expect(app.locals).toHaveProperty('all');
+    expect(app.locals.all).toMatchObject({
+      titleConvo: true,
+      titleModel: 'gpt-4o-mini',
+      titleMethod: 'structured',
+      titlePrompt: 'Default title prompt for all endpoints',
+      titlePromptTemplate: 'Default template: {{conversation}}',
+      titleEndpoint: EModelEndpoint.anthropic,
+      streamRate: 50,
+    });
+
+    // Check that OpenAI endpoint has its own config
+    expect(app.locals).toHaveProperty(EModelEndpoint.openAI);
+    expect(app.locals[EModelEndpoint.openAI]).toMatchObject({
+      titleConvo: true,
+      titleModel: 'gpt-3.5-turbo',
+    });
+  });
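The 'all' test above implies a cascade in which per-endpoint settings sit on top of the shared all block. A hedged sketch of that resolution order (the merge itself is an assumption; only the config values come from the test):

// Per-endpoint config shadows the shared `all` defaults.
const endpoints = {
  all: { titleModel: 'gpt-4o-mini', streamRate: 50 },
  openAI: { titleModel: 'gpt-3.5-turbo' },
};
const resolved = { ...endpoints.all, ...endpoints.openAI };
// => { titleModel: 'gpt-3.5-turbo', streamRate: 50 }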
 });

 describe('AppService updating app.locals and issuing warnings', () => {
@@ -572,6 +779,7 @@ describe('AppService updating app.locals and issuing warnings', () => {

     expect(app.locals).toBeDefined();
     expect(app.locals.paths).toBeDefined();
+    expect(app.locals.config).toEqual({});
     expect(app.locals.fileStrategy).toEqual(FileSources.local);
     expect(app.locals.socialLogins).toEqual(defaultSocialLogins);
     expect(app.locals.balance).toEqual(
@@ -604,6 +812,7 @@ describe('AppService updating app.locals and issuing warnings', () => {

     expect(app.locals).toBeDefined();
     expect(app.locals.paths).toBeDefined();
+    expect(app.locals.config).toEqual(customConfig);
     expect(app.locals.fileStrategy).toEqual(customConfig.fileStrategy);
     expect(app.locals.socialLogins).toEqual(customConfig.registration.socialLogins);
     expect(app.locals.balance).toEqual(customConfig.balance);
@@ -761,4 +970,48 @@ describe('AppService updating app.locals and issuing warnings', () => {
     expect(app.locals.ocr.strategy).toEqual('mistral_ocr');
     expect(app.locals.ocr.mistralModel).toEqual('mistral-medium');
   });
+
+  it('should correctly configure peoplePicker permissions when specified', async () => {
+    const mockConfig = {
+      interface: {
+        peoplePicker: {
+          users: true,
+          groups: true,
+          roles: true,
+        },
+      },
+    };
+
+    require('./Config/loadCustomConfig').mockImplementationOnce(() => Promise.resolve(mockConfig));
+
+    const app = { locals: {} };
+    await AppService(app);
+
+    // Check that interface config includes the permissions
+    expect(app.locals.interfaceConfig.peoplePicker).toBeDefined();
+    expect(app.locals.interfaceConfig.peoplePicker).toMatchObject({
+      users: true,
+      groups: true,
+      roles: true,
+    });
+  });
+
+  it('should use default peoplePicker permissions when not specified', async () => {
+    const mockConfig = {
+      interface: {
+        // No peoplePicker configuration
+      },
+    };
+
+    require('./Config/loadCustomConfig').mockImplementationOnce(() => Promise.resolve(mockConfig));
+
+    const app = { locals: {} };
+    await AppService(app);
+
+    // Check that default permissions are applied
+    expect(app.locals.interfaceConfig.peoplePicker).toBeDefined();
+    expect(app.locals.interfaceConfig.peoplePicker.users).toBe(true);
+    expect(app.locals.interfaceConfig.peoplePicker.groups).toBe(true);
+    expect(app.locals.interfaceConfig.peoplePicker.roles).toBe(true);
+  });
 });
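The second peoplePicker test depends on defaults applying when interface.peoplePicker is absent. One way to express that defaulting, illustrative only; nullish coalescing is used so an explicit false would survive:

// Hypothetical defaulting; the three `true` defaults come from the test above.
const mockConfig = { interface: {} };
const configured = mockConfig.interface.peoplePicker;
const peoplePicker = {
  users: configured?.users ?? true,
  groups: configured?.groups ?? true,
  roles: configured?.roles ?? true,
};
// => { users: true, groups: true, roles: true }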

@@ -60,7 +60,14 @@ const replaceArtifactContent = (originalText, artifact, original, updated) => {

   // Find boundaries between ARTIFACT_START and ARTIFACT_END
   const contentStart = artifactContent.indexOf('\n', artifactContent.indexOf(ARTIFACT_START)) + 1;
-  const contentEnd = artifactContent.lastIndexOf(ARTIFACT_END);
+  let contentEnd = artifactContent.lastIndexOf(ARTIFACT_END);
+
+  // Special case: if contentEnd is 0, it means the only ::: found is at the start of :::artifact
+  // This indicates an incomplete artifact (no closing :::)
+  // We need to check that it's exactly at position 0 (the beginning of artifactContent)
+  if (contentEnd === 0 && artifactContent.indexOf(ARTIFACT_START) === 0) {
+    contentEnd = artifactContent.length;
+  }

   if (contentStart === -1 || contentEnd === -1) {
     return null;
@@ -72,12 +79,20 @@ const replaceArtifactContent = (originalText, artifact, original, updated) => {

   // Determine where to look for the original content
   let searchStart, searchEnd;
-  if (codeBlockStart !== -1 && codeBlockEnd !== -1) {
-    // If code blocks exist, search between them
+  if (codeBlockStart !== -1) {
+    // Code block starts
     searchStart = codeBlockStart + 4; // after ```\n
-    searchEnd = codeBlockEnd;
+
+    if (codeBlockEnd !== -1 && codeBlockEnd > codeBlockStart) {
+      // Code block has proper ending
+      searchEnd = codeBlockEnd;
+    } else {
+      // No closing backticks found or they're before the opening (shouldn't happen)
+      // This might be an incomplete artifact - search to contentEnd
+      searchEnd = contentEnd;
+    }
   } else {
-    // Otherwise search in the whole artifact content
+    // No code blocks at all
     searchStart = contentStart;
     searchEnd = contentEnd;
   }
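The contentEnd === 0 special case above is easiest to see with concrete values. Assuming ARTIFACT_START is ':::artifact' and ARTIFACT_END is ':::' (inferred from the comments in the hunk, not confirmed elsewhere in this diff):

// For a truncated artifact, the only ':::' in the string is the one that
// opens ':::artifact', so lastIndexOf lands on index 0.
const ARTIFACT_END = ':::';
const artifactContent = ':::artifact{id="x"}\n```\nconsole.log("hi")';
const contentEnd = artifactContent.lastIndexOf(ARTIFACT_END); // 0
// Extending contentEnd to artifactContent.length in that case lets the
// replacement logic operate on artifacts that were cut off mid-stream.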
@@ -89,9 +89,9 @@ describe('replaceArtifactContent', () => {
   };

   test('should replace content within artifact boundaries', () => {
-    const original = 'console.log(\'hello\')';
+    const original = "console.log('hello')";
     const artifact = createTestArtifact(original);
-    const updated = 'console.log(\'updated\')';
+    const updated = "console.log('updated')";

     const result = replaceArtifactContent(artifact.text, artifact, original, updated);
     expect(result).toContain(updated);
@@ -317,4 +317,182 @@ console.log(greeting);`;
     expect(result).not.toContain('\n\n```');
     expect(result).not.toContain('```\n\n');
   });
+
+  describe('incomplete artifacts', () => {
+    test('should handle incomplete artifacts (missing closing ::: and ```)', () => {
+      const original = `<!DOCTYPE html>
+<html lang="en">
+<head>
+<meta charset="utf-8" />
+<meta name="viewport" content="width=device-width, initial-scale=1" />
+<title>Pomodoro</title>
+<meta name="description" content="A single-file Pomodoro timer with logs, charts, sounds, and dark mode." />
+<style>
+:root{`;
+
+      const prefix = `Awesome idea! I'll deliver a complete single-file HTML app called "Pomodoro" with:
+- Custom session/break durations
+
+You can save this as pomodoro.html and open it directly in your browser.
+
+`;
+
+      // This simulates the real incomplete artifact case - no closing ``` or :::
+      const incompleteArtifact = `${ARTIFACT_START}{identifier="pomodoro-single-file-app" type="text/html" title="Pomodoro — Single File App"}
+\`\`\`
+${original}`;
+
+      const fullText = prefix + incompleteArtifact;
+      const message = { text: fullText };
+      const artifacts = findAllArtifacts(message);
+
+      expect(artifacts).toHaveLength(1);
+      expect(artifacts[0].end).toBe(fullText.length);
+
+      const updated = original.replace('Pomodoro</title>', 'Pomodoro</title>UPDATED');
+      const result = replaceArtifactContent(fullText, artifacts[0], original, updated);
+
+      expect(result).not.toBeNull();
+      expect(result).toContain('UPDATED');
+      expect(result).toContain(prefix);
+      // Should not have added closing markers
+      expect(result).not.toMatch(/:::\s*$/);
+    });
+
+    test('should handle incomplete artifacts with only opening code block', () => {
+      const original = 'function hello() { console.log("world"); }';
+      const incompleteArtifact = `${ARTIFACT_START}{id="test"}\n\`\`\`\n${original}`;
+
+      const message = { text: incompleteArtifact };
+      const artifacts = findAllArtifacts(message);
+
+      expect(artifacts).toHaveLength(1);
+
+      const updated = 'function hello() { console.log("UPDATED"); }';
+      const result = replaceArtifactContent(incompleteArtifact, artifacts[0], original, updated);
+
+      expect(result).not.toBeNull();
+      expect(result).toContain('UPDATED');
+    });
+
+    test('should handle incomplete artifacts without code blocks', () => {
+      const original = 'Some plain text content';
+      const incompleteArtifact = `${ARTIFACT_START}{id="test"}\n${original}`;
+
+      const message = { text: incompleteArtifact };
+      const artifacts = findAllArtifacts(message);
+
+      expect(artifacts).toHaveLength(1);
+
+      const updated = 'Some UPDATED text content';
+      const result = replaceArtifactContent(incompleteArtifact, artifacts[0], original, updated);
+
+      expect(result).not.toBeNull();
+      expect(result).toContain('UPDATED');
+    });
+  });
+
+  describe('regression tests for edge cases', () => {
+    test('should still handle complete artifacts correctly', () => {
+      // Ensure we didn't break normal artifact handling
+      const original = 'console.log("test");';
+      const artifact = createArtifactText({ content: original });
+
+      const message = { text: artifact };
+      const artifacts = findAllArtifacts(message);
+
+      expect(artifacts).toHaveLength(1);
+
+      const updated = 'console.log("updated");';
+      const result = replaceArtifactContent(artifact, artifacts[0], original, updated);
+
+      expect(result).not.toBeNull();
+      expect(result).toContain(updated);
+      expect(result).toContain(ARTIFACT_END);
+      expect(result).toMatch(/```\nconsole\.log\("updated"\);\n```/);
+    });
+
+    test('should handle multiple complete artifacts', () => {
+      // Ensure multiple artifacts still work
+      const content1 = 'First artifact';
+      const content2 = 'Second artifact';
+      const text = `${createArtifactText({ content: content1 })}\n\n${createArtifactText({ content: content2 })}`;
+
+      const message = { text };
+      const artifacts = findAllArtifacts(message);
+
+      expect(artifacts).toHaveLength(2);
+
+      // Update first artifact
+      const result1 = replaceArtifactContent(text, artifacts[0], content1, 'First UPDATED');
+      expect(result1).not.toBeNull();
+      expect(result1).toContain('First UPDATED');
+      expect(result1).toContain(content2);
+
+      // Update second artifact
+      const result2 = replaceArtifactContent(text, artifacts[1], content2, 'Second UPDATED');
+      expect(result2).not.toBeNull();
+      expect(result2).toContain(content1);
+      expect(result2).toContain('Second UPDATED');
+    });
+
+    test('should not mistake ::: at position 0 for artifact end in complete artifacts', () => {
+      // This tests the specific fix - ensuring contentEnd=0 doesn't break complete artifacts
+      const original = 'test content';
+      // Create an artifact that will have ::: at position 0 when substring'd
+      const artifact = `${ARTIFACT_START}\n\`\`\`\n${original}\n\`\`\`\n${ARTIFACT_END}`;
+
+      const message = { text: artifact };
+      const artifacts = findAllArtifacts(message);
+
+      expect(artifacts).toHaveLength(1);
+
+      const updated = 'updated content';
+      const result = replaceArtifactContent(artifact, artifacts[0], original, updated);
+
+      expect(result).not.toBeNull();
+      expect(result).toContain(updated);
+      expect(result).toContain(ARTIFACT_END);
+    });
+
+    test('should handle empty artifacts', () => {
+      // Edge case: empty artifact
+      const artifact = `${ARTIFACT_START}\n${ARTIFACT_END}`;
+
+      const message = { text: artifact };
+      const artifacts = findAllArtifacts(message);
+
+      expect(artifacts).toHaveLength(1);
+
+      // Trying to replace non-existent content should return null
+      const result = replaceArtifactContent(artifact, artifacts[0], 'something', 'updated');
+      expect(result).toBeNull();
+    });
+
+    test('should preserve whitespace and formatting in complete artifacts', () => {
+      const original = `  function test() {
+    return {
+      value: 42
+    };
+  }`;
+      const artifact = createArtifactText({ content: original });
+
+      const message = { text: artifact };
+      const artifacts = findAllArtifacts(message);
+
+      const updated = `  function test() {
+    return {
+      value: 100
+    };
+  }`;
+      const result = replaceArtifactContent(artifact, artifacts[0], original, updated);
+
+      expect(result).not.toBeNull();
+      expect(result).toContain('value: 100');
+      // Should preserve exact formatting
+      expect(result).toMatch(
+        /```\n {2}function test\(\) \{\n {4}return \{\n {6}value: 100\n {4}\};\n {2}\}\n```/,
+      );
+    });
+  });
 });

@@ -281,7 +281,7 @@ function createInProgressHandler(openai, thread_id, messages) {

       openai.seenCompletedMessages.add(message_id);

-      const message = await openai.beta.threads.messages.retrieve(thread_id, message_id);
+      const message = await openai.beta.threads.messages.retrieve(message_id, { thread_id });
       if (!message?.content?.length) {
         return;
       }
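This hunk tracks the newer openai Node SDK convention for thread-scoped resources: the child ID becomes the first positional argument and the parent thread_id moves into a params object. A hedged sketch of the new calling shape (the wrapper function is hypothetical; only the retrieve call itself comes from the diff):

// Hypothetical wrapper showing the new argument order.
async function fetchThreadMessage(openai, thread_id, message_id) {
  // Message ID first; the owning thread travels in the params object.
  return openai.beta.threads.messages.retrieve(message_id, { thread_id });
}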
@@ -435,9 +435,11 @@ async function runAssistant({
     };
   });

-  const outputs = await processRequiredActions(openai, actions);
-
-  const toolRun = await openai.beta.threads.runs.submitToolOutputs(run.thread_id, run.id, outputs);
+  const tool_outputs = await processRequiredActions(openai, actions);
+  const toolRun = await openai.beta.threads.runs.submitToolOutputs(run.id, {
+    thread_id: run.thread_id,
+    tool_outputs,
+  });

   // Recursive call with accumulated steps and messages
   return await runAssistant({
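The reshaped submitToolOutputs call follows the same convention: run ID first, with everything else, including the tool_outputs array, in the params object. A minimal sketch of the payload shape; the tool_call_id and output fields follow the Assistants API, and all values here are fabricated:

// Hypothetical submit step; `openai` and `run` come from the surrounding code.
async function submitOutputs(openai, run) {
  // Each entry pairs a required-action tool call with its serialized result.
  const tool_outputs = [
    { tool_call_id: 'call_abc123', output: JSON.stringify({ ok: true }) },
  ];
  return openai.beta.threads.runs.submitToolOutputs(run.id, {
    thread_id: run.thread_id,
    tool_outputs,
  });
}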
Some files were not shown because too many files have changed in this diff.