Compare commits
106 commits: feat/mcp-o… → feat/clien…
Commits in this comparison (SHA1 only; the Author and Date columns were empty in the extracted table):
014060a11b, 80d406b629, 22035bbf95, 17a164b420, 5b663d0e35, c273dfc1f4, de69bcdd64, 59412c2b36, 0af8fba7ca, d5cf83313b,
355da0fc2e, 8ba5aa6055, f1204531a8, a65e33758d, 0855631c54, a1e052871f, ca18ada9e2, 103af99879, 412948e025, 4da25826d9,
6e7fdeb3a3, 1c3f5b972d, e7aa83e073, 0b5155d277, 86deb4d19a, f741a59ec4, 83477bba34, 5476029bca, 05a0a1f7cd, 55f67212d5,
63d0c301a0, bac6e499b7, 59de92afa9, e297386cee, e2b1cc607f, 55bda03d19, b11ba35790, 8e1b00da2a, 39f5dd47dc, 16f83c6e8e,
b71a82d0e9, 63a5902404, 21c3a831c3, ae43b4eed0, 2af4ca5b5c, b6c7b0bc71, f8738b207c, 6ea1d5eab2, 992911514c, 9289aeb2ba,
898d273aaf, 5859350bcb, 882cca247a, 0b7dd55797, 9f270127d3, 1380db85cb, dcaa5af598, 545a909953, cd436dc6a8, e75beb92b3,
5251246313, 26f23c6aaf, 1636af1f27, b050a0bf1e, deb928bf80, 21005b66cc, 3dc9e85fab, ec67cf2d3a, 1fe977e48f, 01470ef9fd,
bef5c26bed, 9e03fef9db, 283c9cff6f, 0aafdc0a86, 365e3bca95, a01536ddb7, 8a3ff62ee6, 74d8a3824c, 62c3f135e7, baf3b4ad08,
e5d08ccdf1, 5178507b1c, f797e90d79, 259224d986, 13789ab261, faaba30af1, 14660d75ae, aec1777a90, 90c43dd451, 4c754c1190,
f70e0cf849, d0c958ba33, 0761e65086, 0bf708915b, cf59f1ab45, 445e9eae85, cd9c578907, ac94c73f23, dfef7c31d2, 0b1b0af741,
0a169a1ff6, 4b12ea327a, 35d8ef50f4, 1dabe96404, 7f8c327509, 52bbac3a37
@@ -627,6 +627,15 @@ HELP_AND_FAQ_URL=https://librechat.ai
# Redis connection limits
# REDIS_MAX_LISTENERS=40

# Redis ping interval in seconds (0 = disabled, >0 = enabled)
# When set to a positive integer, Redis clients will ping the server at this interval to keep connections alive
# When unset or 0, no pinging is performed (recommended for most use cases)
# REDIS_PING_INTERVAL=300

# Force specific cache namespaces to use in-memory storage even when Redis is enabled
# Comma-separated list of CacheKeys (e.g., STATIC_CONFIG,ROLES,MESSAGES)
# FORCED_IN_MEMORY_CACHE_NAMESPACES=STATIC_CONFIG,ROLES

#==================================================#
#                      Others                      #
#==================================================#
2 changes: .github/workflows/backend-review.yml (vendored)
@@ -7,7 +7,7 @@ on:
      - release/*
    paths:
      - 'api/**'
      - 'packages/api/**'
      - 'packages/**'
jobs:
  tests_Backend:
    name: Run Backend unit tests
58 changes: .github/workflows/client.yml (vendored, new file)
@@ -0,0 +1,58 @@
name: Publish `@librechat/client` to NPM

on:
  push:
    branches:
      - main
    paths:
      - 'packages/client/package.json'
  workflow_dispatch:
    inputs:
      reason:
        description: 'Reason for manual trigger'
        required: false
        default: 'Manual publish requested'

jobs:
  build-and-publish:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Use Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20.x'

      - name: Install client dependencies
        run: cd packages/client && npm ci

      - name: Build client
        run: cd packages/client && npm run build

      - name: Set up npm authentication
        run: echo "//registry.npmjs.org/:_authToken=${{ secrets.PUBLISH_NPM_TOKEN }}" > ~/.npmrc

      - name: Check version change
        id: check
        working-directory: packages/client
        run: |
          PACKAGE_VERSION=$(node -p "require('./package.json').version")
          PUBLISHED_VERSION=$(npm view @librechat/client version 2>/dev/null || echo "0.0.0")
          if [ "$PACKAGE_VERSION" = "$PUBLISHED_VERSION" ]; then
            echo "No version change, skipping publish"
            echo "skip=true" >> $GITHUB_OUTPUT
          else
            echo "Version changed, proceeding with publish"
            echo "skip=false" >> $GITHUB_OUTPUT
          fi

      - name: Pack package
        if: steps.check.outputs.skip != 'true'
        working-directory: packages/client
        run: npm pack

      - name: Publish
        if: steps.check.outputs.skip != 'true'
        working-directory: packages/client
        run: npm publish *.tgz --access public
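The version-check step above only publishes when packages/client/package.json carries a version that is not yet on the npm registry. As a rough Node.js restatement of that gate (a hypothetical standalone script, not part of this diff; names and paths are illustrative):

// check-and-publish.js — sketch of the workflow's version gate (assumed helper, not in the repo)
const { execSync } = require('child_process');
const { version } = require('./packages/client/package.json');

let published = '0.0.0';
try {
  // `npm view` prints the latest published version; fall back to 0.0.0 if the package is new
  published = execSync('npm view @librechat/client version', { encoding: 'utf8' }).trim();
} catch (err) {
  // package not published yet
}

if (version === published) {
  console.log('No version change, skipping publish');
} else {
  console.log(`Version changed (${published} -> ${version}), publishing`);
  execSync('npm publish --access public', { cwd: 'packages/client', stdio: 'inherit' });
}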
2 changes: .github/workflows/frontend-review.yml (vendored)
@@ -8,7 +8,7 @@ on:
      - release/*
    paths:
      - 'client/**'
      - 'packages/**'
      - 'packages/data-provider/**'

jobs:
  tests_frontend_ubuntu:
@@ -1,95 +0,0 @@
name: Generate Release Changelog PR

on:
  push:
    tags:
      - 'v*.*.*'
  workflow_dispatch:

jobs:
  generate-release-changelog-pr:
    permissions:
      contents: write # Needed for pushing commits and creating branches.
      pull-requests: write
    runs-on: ubuntu-latest
    steps:
      # 1. Checkout the repository (with full history).
      - name: Checkout Repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      # 2. Generate the release changelog using our custom configuration.
      - name: Generate Release Changelog
        id: generate_release
        uses: mikepenz/release-changelog-builder-action@v5.1.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          configuration: ".github/configuration-release.json"
          owner: ${{ github.repository_owner }}
          repo: ${{ github.event.repository.name }}
          outputFile: CHANGELOG-release.md

      # 3. Update the main CHANGELOG.md:
      #    - If it doesn't exist, create it with a basic header.
      #    - Remove the "Unreleased" section (if present).
      #    - Prepend the new release changelog above previous releases.
      #    - Remove all temporary files before committing.
      - name: Update CHANGELOG.md
        run: |
          # Determine the release tag, e.g. "v1.2.3"
          TAG=${GITHUB_REF##*/}
          echo "Using release tag: $TAG"

          # Ensure CHANGELOG.md exists; if not, create a basic header.
          if [ ! -f CHANGELOG.md ]; then
            echo "# Changelog" > CHANGELOG.md
            echo "" >> CHANGELOG.md
            echo "All notable changes to this project will be documented in this file." >> CHANGELOG.md
            echo "" >> CHANGELOG.md
          fi

          echo "Updating CHANGELOG.md…"

          # Remove the "Unreleased" section (from "## [Unreleased]" until the first occurrence of '---') if it exists.
          if grep -q "^## \[Unreleased\]" CHANGELOG.md; then
            awk '/^## \[Unreleased\]/{flag=1} flag && /^---/{flag=0; next} !flag' CHANGELOG.md > CHANGELOG.cleaned
          else
            cp CHANGELOG.md CHANGELOG.cleaned
          fi

          # Split the cleaned file into:
          #   - header.md: content before the first release header ("## [v...").
          #   - tail.md: content from the first release header onward.
          awk '/^## \[v/{exit} {print}' CHANGELOG.cleaned > header.md
          awk 'f{print} /^## \[v/{f=1; print}' CHANGELOG.cleaned > tail.md

          # Combine header, the new release changelog, and the tail.
          echo "Combining updated changelog parts..."
          cat header.md CHANGELOG-release.md > CHANGELOG.md.new
          echo "" >> CHANGELOG.md.new
          cat tail.md >> CHANGELOG.md.new

          mv CHANGELOG.md.new CHANGELOG.md

          # Remove temporary files.
          rm -f CHANGELOG.cleaned header.md tail.md CHANGELOG-release.md

          echo "Final CHANGELOG.md content:"
          cat CHANGELOG.md

      # 4. Create (or update) the Pull Request with the updated CHANGELOG.md.
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          sign-commits: true
          commit-message: "chore: update CHANGELOG for release ${{ github.ref_name }}"
          base: main
          branch: "changelog/${{ github.ref_name }}"
          reviewers: danny-avila
          title: "📜 docs: Changelog for release ${{ github.ref_name }}"
          body: |
            **Description**:
            - This PR updates the CHANGELOG.md by removing the "Unreleased" section and adding new release notes for release ${{ github.ref_name }} above previous releases.
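For reference, the awk/shell steps in this removed workflow amount to: drop any existing "Unreleased" block, then splice the freshly generated release notes between the changelog header and the first "## [v..." section. A rough JavaScript sketch of that text manipulation (illustrative only; the workflow itself used awk as shown above):

// splice-changelog.js — illustrative restatement of the removed workflow's awk logic
const fs = require('fs');

const changelog = fs.readFileSync('CHANGELOG.md', 'utf8');
const releaseNotes = fs.readFileSync('CHANGELOG-release.md', 'utf8');

// Drop the "## [Unreleased]" block (up to and including the first '---' line), if present
const cleaned = changelog.replace(/^## \[Unreleased\][\s\S]*?^---\s*\n/m, '');

// Split at the first versioned release header and insert the new notes between header and tail
const firstRelease = cleaned.search(/^## \[v/m);
const header = firstRelease === -1 ? cleaned : cleaned.slice(0, firstRelease);
const tail = firstRelease === -1 ? '' : cleaned.slice(firstRelease);

fs.writeFileSync('CHANGELOG.md', `${header}${releaseNotes}\n${tail}`);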
@@ -1,107 +0,0 @@
name: Generate Unreleased Changelog PR

on:
  schedule:
    - cron: "0 0 * * 1" # Runs every Monday at 00:00 UTC
  workflow_dispatch:

jobs:
  generate-unreleased-changelog-pr:
    permissions:
      contents: write # Needed for pushing commits and creating branches.
      pull-requests: write
    runs-on: ubuntu-latest
    steps:
      # 1. Checkout the repository on main.
      - name: Checkout Repository on Main
        uses: actions/checkout@v4
        with:
          ref: main
          fetch-depth: 0

      # 4. Get the latest version tag.
      - name: Get Latest Tag
        id: get_latest_tag
        run: |
          LATEST_TAG=$(git describe --tags $(git rev-list --tags --max-count=1) || echo "none")
          echo "Latest tag: $LATEST_TAG"
          echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT

      # 5. Generate the Unreleased changelog.
      - name: Generate Unreleased Changelog
        id: generate_unreleased
        uses: mikepenz/release-changelog-builder-action@v5.1.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          configuration: ".github/configuration-unreleased.json"
          owner: ${{ github.repository_owner }}
          repo: ${{ github.event.repository.name }}
          outputFile: CHANGELOG-unreleased.md
          fromTag: ${{ steps.get_latest_tag.outputs.tag }}
          toTag: main

      # 7. Update CHANGELOG.md with the new Unreleased section.
      - name: Update CHANGELOG.md
        id: update_changelog
        run: |
          # Create CHANGELOG.md if it doesn't exist.
          if [ ! -f CHANGELOG.md ]; then
            echo "# Changelog" > CHANGELOG.md
            echo "" >> CHANGELOG.md
            echo "All notable changes to this project will be documented in this file." >> CHANGELOG.md
            echo "" >> CHANGELOG.md
          fi

          echo "Updating CHANGELOG.md…"

          # Extract content before the "## [Unreleased]" (or first version header if missing).
          if grep -q "^## \[Unreleased\]" CHANGELOG.md; then
            awk '/^## \[Unreleased\]/{exit} {print}' CHANGELOG.md > CHANGELOG_TMP.md
          else
            awk '/^## \[v/{exit} {print}' CHANGELOG.md > CHANGELOG_TMP.md
          fi

          # Append the generated Unreleased changelog.
          echo "" >> CHANGELOG_TMP.md
          cat CHANGELOG-unreleased.md >> CHANGELOG_TMP.md
          echo "" >> CHANGELOG_TMP.md

          # Append the remainder of the original changelog (starting from the first version header).
          awk 'f{print} /^## \[v/{f=1; print}' CHANGELOG.md >> CHANGELOG_TMP.md

          # Replace the old file with the updated file.
          mv CHANGELOG_TMP.md CHANGELOG.md

          # Remove the temporary generated file.
          rm -f CHANGELOG-unreleased.md

          echo "Final CHANGELOG.md:"
          cat CHANGELOG.md

      # 8. Check if CHANGELOG.md has any updates.
      - name: Check for CHANGELOG.md changes
        id: changelog_changes
        run: |
          if git diff --quiet CHANGELOG.md; then
            echo "has_changes=false" >> $GITHUB_OUTPUT
          else
            echo "has_changes=true" >> $GITHUB_OUTPUT
          fi

      # 9. Create (or update) the Pull Request only if there are changes.
      - name: Create Pull Request
        if: steps.changelog_changes.outputs.has_changes == 'true'
        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          base: main
          branch: "changelog/unreleased-update"
          sign-commits: true
          commit-message: "action: update Unreleased changelog"
          title: "📜 docs: Unreleased Changelog"
          body: |
            **Description**:
            - This PR updates the Unreleased section in CHANGELOG.md.
            - It compares the current main branch with the latest version tag (determined as ${{ steps.get_latest_tag.outputs.tag }}),
              regenerates the Unreleased changelog, removes any old Unreleased block, and inserts the new content.
3 changes: .github/workflows/i18n-unused-keys.yml (vendored)
@@ -6,6 +6,7 @@ on:
      - "client/src/**"
      - "api/**"
      - "packages/data-provider/src/**"
      - "packages/client/**"

jobs:
  detect-unused-i18n-keys:
@@ -23,7 +24,7 @@ jobs:

          # Define paths
          I18N_FILE="client/src/locales/en/translation.json"
          SOURCE_DIRS=("client/src" "api" "packages/data-provider/src")
          SOURCE_DIRS=("client/src" "api" "packages/data-provider/src" "packages/client")

          # Check if translation file exists
          if [[ ! -f "$I18N_FILE" ]]; then
101 changes: .github/workflows/unused-packages.yml (vendored)
@@ -7,6 +7,7 @@ on:
      - 'package-lock.json'
      - 'client/**'
      - 'api/**'
      - 'packages/client/**'

jobs:
  detect-unused-packages:
@@ -28,7 +29,7 @@ jobs:

      - name: Validate JSON files
        run: |
          for FILE in package.json client/package.json api/package.json; do
          for FILE in package.json client/package.json api/package.json packages/client/package.json; do
            if [[ -f "$FILE" ]]; then
              jq empty "$FILE" || (echo "::error title=Invalid JSON::$FILE is invalid" && exit 1)
            fi
@@ -63,12 +64,31 @@ jobs:
            local folder=$1
            local output_file=$2
            if [[ -d "$folder" ]]; then
              grep -rEho "require\\(['\"]([a-zA-Z0-9@/._-]+)['\"]\\)" "$folder" --include=\*.{js,ts,mjs,cjs} | \
              # Extract require() statements
              grep -rEho "require\\(['\"]([a-zA-Z0-9@/._-]+)['\"]\\)" "$folder" --include=\*.{js,ts,tsx,jsx,mjs,cjs} | \
                sed -E "s/require\\(['\"]([a-zA-Z0-9@/._-]+)['\"]\\)/\1/" > "$output_file"

              grep -rEho "import .* from ['\"]([a-zA-Z0-9@/._-]+)['\"]" "$folder" --include=\*.{js,ts,mjs,cjs} | \
              # Extract ES6 imports - various patterns
              # import x from 'module'
              grep -rEho "import .* from ['\"]([a-zA-Z0-9@/._-]+)['\"]" "$folder" --include=\*.{js,ts,tsx,jsx,mjs,cjs} | \
                sed -E "s/import .* from ['\"]([a-zA-Z0-9@/._-]+)['\"]/\1/" >> "$output_file"

              # import 'module' (side-effect imports)
              grep -rEho "import ['\"]([a-zA-Z0-9@/._-]+)['\"]" "$folder" --include=\*.{js,ts,tsx,jsx,mjs,cjs} | \
                sed -E "s/import ['\"]([a-zA-Z0-9@/._-]+)['\"]/\1/" >> "$output_file"

              # export { x } from 'module' or export * from 'module'
              grep -rEho "export .* from ['\"]([a-zA-Z0-9@/._-]+)['\"]" "$folder" --include=\*.{js,ts,tsx,jsx,mjs,cjs} | \
                sed -E "s/export .* from ['\"]([a-zA-Z0-9@/._-]+)['\"]/\1/" >> "$output_file"

              # import type { x } from 'module' (TypeScript)
              grep -rEho "import type .* from ['\"]([a-zA-Z0-9@/._-]+)['\"]" "$folder" --include=\*.{ts,tsx} | \
                sed -E "s/import type .* from ['\"]([a-zA-Z0-9@/._-]+)['\"]/\1/" >> "$output_file"

              # Remove subpath imports but keep the base package
              # e.g., '@tanstack/react-query/devtools' becomes '@tanstack/react-query'
              sed -i -E 's|^(@?[a-zA-Z0-9-]+(/[a-zA-Z0-9-]+)?)/.*|\1|' "$output_file"

              sort -u "$output_file" -o "$output_file"
            else
              touch "$output_file"
@@ -78,13 +98,80 @@ jobs:
          extract_deps_from_code "." root_used_code.txt
          extract_deps_from_code "client" client_used_code.txt
          extract_deps_from_code "api" api_used_code.txt

          # Extract dependencies used by @librechat/client package
          extract_deps_from_code "packages/client" packages_client_used_code.txt

      - name: Get @librechat/client dependencies
        id: get-librechat-client-deps
        run: |
          if [[ -f "packages/client/package.json" ]]; then
            # Get all dependencies from @librechat/client (dependencies, devDependencies, and peerDependencies)
            DEPS=$(jq -r '.dependencies // {} | keys[]' packages/client/package.json 2>/dev/null || echo "")
            DEV_DEPS=$(jq -r '.devDependencies // {} | keys[]' packages/client/package.json 2>/dev/null || echo "")
            PEER_DEPS=$(jq -r '.peerDependencies // {} | keys[]' packages/client/package.json 2>/dev/null || echo "")

            # Combine all dependencies
            echo "$DEPS" > librechat_client_deps.txt
            echo "$DEV_DEPS" >> librechat_client_deps.txt
            echo "$PEER_DEPS" >> librechat_client_deps.txt

            # Also include dependencies that are imported in packages/client
            cat packages_client_used_code.txt >> librechat_client_deps.txt

            # Remove empty lines and sort
            grep -v '^$' librechat_client_deps.txt | sort -u > temp_deps.txt
            mv temp_deps.txt librechat_client_deps.txt
          else
            touch librechat_client_deps.txt
          fi

      - name: Extract Workspace Dependencies
        id: extract-workspace-deps
        run: |
          # Function to get dependencies from a workspace package that are used by another package
          get_workspace_package_deps() {
            local package_json=$1
            local output_file=$2

            # Get all workspace dependencies (starting with @librechat/)
            if [[ -f "$package_json" ]]; then
              local workspace_deps=$(jq -r '.dependencies // {} | to_entries[] | select(.key | startswith("@librechat/")) | .key' "$package_json" 2>/dev/null || echo "")

              # For each workspace dependency, get its dependencies
              for dep in $workspace_deps; do
                # Convert @librechat/api to packages/api
                local workspace_path=$(echo "$dep" | sed 's/@librechat\//packages\//')
                local workspace_package_json="${workspace_path}/package.json"

                if [[ -f "$workspace_package_json" ]]; then
                  # Extract all dependencies from the workspace package
                  jq -r '.dependencies // {} | keys[]' "$workspace_package_json" 2>/dev/null >> "$output_file"
                  # Also extract peerDependencies
                  jq -r '.peerDependencies // {} | keys[]' "$workspace_package_json" 2>/dev/null >> "$output_file"
                fi
              done
            fi

            if [[ -f "$output_file" ]]; then
              sort -u "$output_file" -o "$output_file"
            else
              touch "$output_file"
            fi
          }

          # Get workspace dependencies for each package
          get_workspace_package_deps "package.json" root_workspace_deps.txt
          get_workspace_package_deps "client/package.json" client_workspace_deps.txt
          get_workspace_package_deps "api/package.json" api_workspace_deps.txt

      - name: Run depcheck for root package.json
        id: check-root
        run: |
          if [[ -f "package.json" ]]; then
            UNUSED=$(depcheck --json | jq -r '.dependencies | join("\n")' || echo "")
            UNUSED=$(comm -23 <(echo "$UNUSED" | sort) <(cat root_used_deps.txt root_used_code.txt | sort) || echo "")
            # Exclude dependencies used in scripts, code, and workspace packages
            UNUSED=$(comm -23 <(echo "$UNUSED" | sort) <(cat root_used_deps.txt root_used_code.txt root_workspace_deps.txt | sort) || echo "")
            echo "ROOT_UNUSED<<EOF" >> $GITHUB_ENV
            echo "$UNUSED" >> $GITHUB_ENV
            echo "EOF" >> $GITHUB_ENV
@@ -97,7 +184,8 @@ jobs:
            chmod -R 755 client
            cd client
            UNUSED=$(depcheck --json | jq -r '.dependencies | join("\n")' || echo "")
            UNUSED=$(comm -23 <(echo "$UNUSED" | sort) <(cat ../client_used_deps.txt ../client_used_code.txt | sort) || echo "")
            # Exclude dependencies used in scripts, code, and workspace packages
            UNUSED=$(comm -23 <(echo "$UNUSED" | sort) <(cat ../client_used_deps.txt ../client_used_code.txt ../client_workspace_deps.txt | sort) || echo "")
            # Filter out false positives
            UNUSED=$(echo "$UNUSED" | grep -v "^micromark-extension-llm-math$" || echo "")
            echo "CLIENT_UNUSED<<EOF" >> $GITHUB_ENV
@@ -113,7 +201,8 @@ jobs:
            chmod -R 755 api
            cd api
            UNUSED=$(depcheck --json | jq -r '.dependencies | join("\n")' || echo "")
            UNUSED=$(comm -23 <(echo "$UNUSED" | sort) <(cat ../api_used_deps.txt ../api_used_code.txt | sort) || echo "")
            # Exclude dependencies used in scripts, code, and workspace packages
            UNUSED=$(comm -23 <(echo "$UNUSED" | sort) <(cat ../api_used_deps.txt ../api_used_code.txt ../api_workspace_deps.txt | sort) || echo "")
            echo "API_UNUSED<<EOF" >> $GITHUB_ENV
            echo "$UNUSED" >> $GITHUB_ENV
            echo "EOF" >> $GITHUB_ENV
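The grep/sed pipeline above collects every package name the source code references (require, ES import, side-effect import, re-export, and type-only import) and trims subpath imports back to the base package. A compact JavaScript approximation of the same idea (hypothetical helper, for illustration only; it is not part of the workflow):

// extract-deps.js — rough equivalent of the workflow's grep/sed extraction (illustrative)
const fs = require('fs');

function extractPackageNames(source) {
  const names = new Set();
  const patterns = [
    /require\(['"]([a-zA-Z0-9@/._-]+)['"]\)/g,            // require('pkg')
    /import\s+.*?\s+from\s+['"]([a-zA-Z0-9@/._-]+)['"]/g, // import x from 'pkg'
    /import\s+['"]([a-zA-Z0-9@/._-]+)['"]/g,              // import 'pkg' (side effects)
    /export\s+.*?\s+from\s+['"]([a-zA-Z0-9@/._-]+)['"]/g, // export { x } from 'pkg'
  ];
  for (const re of patterns) {
    for (const match of source.matchAll(re)) {
      // Trim subpaths: '@tanstack/react-query/devtools' -> '@tanstack/react-query', 'lodash/merge' -> 'lodash'
      const parts = match[1].split('/');
      const base = match[1].startsWith('@') ? parts.slice(0, 2).join('/') : parts[0];
      names.add(base);
    }
  }
  return [...names].sort();
}

console.log(extractPackageNames(fs.readFileSync(process.argv[2], 'utf8')));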
@@ -1,4 +1,4 @@
# v0.7.9-rc1
# v0.7.9

# Base node image
FROM node:20-alpine AS node

@@ -1,5 +1,5 @@
# Dockerfile.multi
# v0.7.9-rc1
# v0.7.9

# Base for all builds
FROM node:20-alpine AS base-min
@@ -108,12 +108,15 @@ class BaseClient {
/**
 * Abstract method to record token usage. Subclasses must implement this method.
 * If a correction to the token usage is needed, the method should return an object with the corrected token counts.
 * Should only be used if `recordCollectedUsage` was not used instead.
 * @param {string} [model]
 * @param {number} promptTokens
 * @param {number} completionTokens
 * @returns {Promise<void>}
 */
async recordTokenUsage({ promptTokens, completionTokens }) {
async recordTokenUsage({ model, promptTokens, completionTokens }) {
logger.debug('[BaseClient] `recordTokenUsage` not implemented.', {
model,
promptTokens,
completionTokens,
});
@@ -741,9 +744,13 @@ class BaseClient {
} else {
responseMessage.tokenCount = this.getTokenCountForResponse(responseMessage);
completionTokens = responseMessage.tokenCount;
await this.recordTokenUsage({
usage,
promptTokens,
completionTokens,
model: responseMessage.model,
});
}

await this.recordTokenUsage({ promptTokens, completionTokens, usage });
}

if (userMessagePromise) {
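recordTokenUsage now receives the response model as well, so subclasses can attribute spend per model. A minimal sketch of what an override might look like (hypothetical subclass; it assumes a `spendTokens`-style helper, and real clients record usage through their own transaction logic):

// Sketch only — `ExampleClient` and the exact `spendTokens` call are assumptions, not code from this diff.
class ExampleClient extends BaseClient {
  async recordTokenUsage({ model, promptTokens, completionTokens }) {
    await spendTokens(
      {
        model: model ?? this.modelOptions.model,
        user: this.user,
        conversationId: this.conversationId,
        context: 'message',
      },
      { promptTokens, completionTokens },
    );
  }
}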
@@ -237,41 +237,9 @@ const formatAgentMessages = (payload) => {
return messages;
};

/**
 * Formats an array of messages for LangChain, making sure all content fields are strings
 * @param {Array<(HumanMessage|AIMessage|SystemMessage|ToolMessage)>} payload - The array of messages to format.
 * @returns {Array<(HumanMessage|AIMessage|SystemMessage|ToolMessage)>} - The array of formatted LangChain messages, including ToolMessages for tool calls.
 */
const formatContentStrings = (payload) => {
const messages = [];

for (const message of payload) {
if (typeof message.content === 'string') {
continue;
}

if (!Array.isArray(message.content)) {
continue;
}

// Reduce text types to a single string, ignore all other types
const content = message.content.reduce((acc, curr) => {
if (curr.type === ContentTypes.TEXT) {
return `${acc}${curr[ContentTypes.TEXT]}\n`;
}
return acc;
}, '');

message.content = content.trim();
}

return messages;
};

module.exports = {
formatMessage,
formatFromLangChain,
formatAgentMessages,
formatContentStrings,
formatLangChainMessages,
};
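formatContentStrings (now imported from @librechat/agents elsewhere in this diff rather than defined here) collapses array-style message content into a plain string for providers that only accept string content. Roughly, assuming the behavior of the removed implementation shown above, with illustrative values:

// Illustrative input/output based on the removed implementation
const payload = [
  {
    content: [
      { type: 'text', text: 'Hello' },
      { type: 'image_url', image_url: { url: '...' } }, // non-text parts are ignored
      { type: 'text', text: 'world' },
    ],
  },
];

formatContentStrings(payload);
// payload[0].content is now the string 'Hello\nworld' — messages are mutated in place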
@@ -1,14 +1,9 @@
const { mcpToolPattern } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { SerpAPI } = require('@langchain/community/tools/serpapi');
const { Calculator } = require('@langchain/community/tools/calculator');
const { mcpToolPattern, loadWebSearchAuth } = require('@librechat/api');
const { EnvVar, createCodeExecutionTool, createSearchTool } = require('@librechat/agents');
const {
Tools,
EToolResources,
loadWebSearchAuth,
replaceSpecialVars,
} = require('librechat-data-provider');
const { Tools, EToolResources, replaceSpecialVars } = require('librechat-data-provider');
const {
availableTools,
manifestToolMap,
21 changes: api/cache/cacheConfig.js (vendored)
@@ -1,5 +1,6 @@
const fs = require('fs');
const { math, isEnabled } = require('@librechat/api');
const { CacheKeys } = require('librechat-data-provider');

// To ensure that different deployments do not interfere with each other's cache, we use a prefix for the Redis keys.
// This prefix is usually the deployment ID, which is often passed to the container or pod as an env var.
@@ -15,7 +16,26 @@ if (USE_REDIS && !process.env.REDIS_URI) {
throw new Error('USE_REDIS is enabled but REDIS_URI is not set.');
}

// Comma-separated list of cache namespaces that should be forced to use in-memory storage
// even when Redis is enabled. This allows selective performance optimization for specific caches.
const FORCED_IN_MEMORY_CACHE_NAMESPACES = process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES
? process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES.split(',').map((key) => key.trim())
: [];

// Validate against CacheKeys enum
if (FORCED_IN_MEMORY_CACHE_NAMESPACES.length > 0) {
const validKeys = Object.values(CacheKeys);
const invalidKeys = FORCED_IN_MEMORY_CACHE_NAMESPACES.filter((key) => !validKeys.includes(key));

if (invalidKeys.length > 0) {
throw new Error(
`Invalid cache keys in FORCED_IN_MEMORY_CACHE_NAMESPACES: ${invalidKeys.join(', ')}. Valid keys: ${validKeys.join(', ')}`,
);
}
}

const cacheConfig = {
FORCED_IN_MEMORY_CACHE_NAMESPACES,
USE_REDIS,
REDIS_URI: process.env.REDIS_URI,
REDIS_USERNAME: process.env.REDIS_USERNAME,
@@ -23,6 +43,7 @@ const cacheConfig = {
REDIS_CA: process.env.REDIS_CA ? fs.readFileSync(process.env.REDIS_CA, 'utf8') : null,
REDIS_KEY_PREFIX: process.env[REDIS_KEY_PREFIX_VAR] || REDIS_KEY_PREFIX || '',
REDIS_MAX_LISTENERS: math(process.env.REDIS_MAX_LISTENERS, 40),
REDIS_PING_INTERVAL: math(process.env.REDIS_PING_INTERVAL, 0),

CI: isEnabled(process.env.CI),
DEBUG_MEMORY_CACHE: isEnabled(process.env.DEBUG_MEMORY_CACHE),
49 changes: api/cache/cacheConfig.spec.js (vendored)
@@ -14,6 +14,8 @@ describe('cacheConfig', () => {
delete process.env.REDIS_KEY_PREFIX_VAR;
delete process.env.REDIS_KEY_PREFIX;
delete process.env.USE_REDIS;
delete process.env.REDIS_PING_INTERVAL;
delete process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES;

// Clear require cache
jest.resetModules();
@@ -105,4 +107,51 @@ describe('cacheConfig', () => {
expect(cacheConfig.REDIS_CA).toBeNull();
});
});

describe('REDIS_PING_INTERVAL configuration', () => {
test('should default to 0 when REDIS_PING_INTERVAL is not set', () => {
const { cacheConfig } = require('./cacheConfig');
expect(cacheConfig.REDIS_PING_INTERVAL).toBe(0);
});

test('should use provided REDIS_PING_INTERVAL value', () => {
process.env.REDIS_PING_INTERVAL = '300';

const { cacheConfig } = require('./cacheConfig');
expect(cacheConfig.REDIS_PING_INTERVAL).toBe(300);
});
});

describe('FORCED_IN_MEMORY_CACHE_NAMESPACES validation', () => {
test('should parse comma-separated cache keys correctly', () => {
process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = ' ROLES, STATIC_CONFIG ,MESSAGES ';

const { cacheConfig } = require('./cacheConfig');
expect(cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES).toEqual([
'ROLES',
'STATIC_CONFIG',
'MESSAGES',
]);
});

test('should throw error for invalid cache keys', () => {
process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = 'INVALID_KEY,ROLES';

expect(() => {
require('./cacheConfig');
}).toThrow('Invalid cache keys in FORCED_IN_MEMORY_CACHE_NAMESPACES: INVALID_KEY');
});

test('should handle empty string gracefully', () => {
process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = '';

const { cacheConfig } = require('./cacheConfig');
expect(cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES).toEqual([]);
});

test('should handle undefined env var gracefully', () => {
const { cacheConfig } = require('./cacheConfig');
expect(cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES).toEqual([]);
});
});
});
7 changes: api/cache/cacheFactory.js (vendored)
@@ -3,7 +3,7 @@ const { Keyv } = require('keyv');
const { cacheConfig } = require('./cacheConfig');
const { keyvRedisClient, ioredisClient, GLOBAL_PREFIX_SEPARATOR } = require('./redisClients');
const { Time } = require('librechat-data-provider');
const ConnectRedis = require('connect-redis').default;
const { RedisStore: ConnectRedis } = require('connect-redis');
const MemoryStore = require('memorystore')(require('express-session'));
const { violationFile } = require('./keyvFiles');
const { RedisStore } = require('rate-limit-redis');
@@ -16,7 +16,10 @@ const { RedisStore } = require('rate-limit-redis');
 * @returns {Keyv} Cache instance.
 */
const standardCache = (namespace, ttl = undefined, fallbackStore = undefined) => {
if (cacheConfig.USE_REDIS) {
if (
cacheConfig.USE_REDIS &&
!cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES?.includes(namespace)
) {
const keyvRedis = new KeyvRedis(keyvRedisClient);
const cache = new Keyv(keyvRedis, { namespace, ttl });
keyvRedis.namespace = cacheConfig.REDIS_KEY_PREFIX;
30 changes: api/cache/cacheFactory.spec.js (vendored)
@@ -31,6 +31,7 @@ jest.mock('./cacheConfig', () => ({
cacheConfig: {
USE_REDIS: false,
REDIS_KEY_PREFIX: 'test',
FORCED_IN_MEMORY_CACHE_NAMESPACES: [],
},
}));

@@ -44,9 +45,7 @@ jest.mock('./keyvFiles', () => ({
violationFile: mockViolationFile,
}));

jest.mock('connect-redis', () => ({
default: mockConnectRedis,
}));
jest.mock('connect-redis', () => ({ RedisStore: mockConnectRedis }));

jest.mock('memorystore', () => jest.fn(() => mockMemoryStore));

@@ -65,6 +64,7 @@ describe('cacheFactory', () => {
// Reset cache config mock
cacheConfig.USE_REDIS = false;
cacheConfig.REDIS_KEY_PREFIX = 'test';
cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES = [];
});

describe('redisCache', () => {
@@ -118,6 +118,30 @@ describe('cacheFactory', () => {

expect(mockKeyv).toHaveBeenCalledWith({ namespace: undefined, ttl: undefined });
});

it('should use fallback when namespace is in FORCED_IN_MEMORY_CACHE_NAMESPACES', () => {
cacheConfig.USE_REDIS = true;
cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES = ['forced-memory'];
const namespace = 'forced-memory';
const ttl = 3600;

standardCache(namespace, ttl);

expect(require('@keyv/redis').default).not.toHaveBeenCalled();
expect(mockKeyv).toHaveBeenCalledWith({ namespace, ttl });
});

it('should use Redis when namespace is not in FORCED_IN_MEMORY_CACHE_NAMESPACES', () => {
cacheConfig.USE_REDIS = true;
cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES = ['other-namespace'];
const namespace = 'test-namespace';
const ttl = 3600;

standardCache(namespace, ttl);

expect(require('@keyv/redis').default).toHaveBeenCalledWith(mockKeyvRedisClient);
expect(mockKeyv).toHaveBeenCalledWith(mockKeyvRedis, { namespace, ttl });
});
});

describe('violationCache', () => {
1 change: api/cache/getLogStores.js (vendored)
@@ -33,6 +33,7 @@ const namespaces = {
[CacheKeys.ROLES]: standardCache(CacheKeys.ROLES),
[CacheKeys.MCP_TOOLS]: standardCache(CacheKeys.MCP_TOOLS),
[CacheKeys.CONFIG_STORE]: standardCache(CacheKeys.CONFIG_STORE),
[CacheKeys.STATIC_CONFIG]: standardCache(CacheKeys.STATIC_CONFIG),
[CacheKeys.PENDING_REQ]: standardCache(CacheKeys.PENDING_REQ),
[CacheKeys.ENCODED_DOMAINS]: new Keyv({ store: keyvMongo, namespace: CacheKeys.ENCODED_DOMAINS }),
[CacheKeys.ABORT_KEYS]: standardCache(CacheKeys.ABORT_KEYS, Time.TEN_MINUTES),
61 changes: api/cache/redisClients.js (vendored)
@@ -1,6 +1,7 @@
const IoRedis = require('ioredis');
const { cacheConfig } = require('./cacheConfig');
const { logger } = require('@librechat/data-schemas');
const { createClient, createCluster } = require('@keyv/redis');
const { cacheConfig } = require('./cacheConfig');

const GLOBAL_PREFIX_SEPARATOR = '::';

@@ -25,17 +26,37 @@ if (cacheConfig.USE_REDIS) {
? new IoRedis(cacheConfig.REDIS_URI, redisOptions)
: new IoRedis.Cluster(cacheConfig.REDIS_URI, { redisOptions });

// Pinging the Redis server every 5 minutes to keep the connection alive
const pingInterval = setInterval(() => ioredisClient.ping(), 5 * 60 * 1000);
ioredisClient.on('close', () => clearInterval(pingInterval));
ioredisClient.on('end', () => clearInterval(pingInterval));
ioredisClient.on('error', (err) => {
logger.error('ioredis client error:', err);
});

/** Ping Interval to keep the Redis server connection alive (if enabled) */
let pingInterval = null;
const clearPingInterval = () => {
if (pingInterval) {
clearInterval(pingInterval);
pingInterval = null;
}
};

if (cacheConfig.REDIS_PING_INTERVAL > 0) {
pingInterval = setInterval(() => {
if (ioredisClient && ioredisClient.status === 'ready') {
ioredisClient.ping();
}
}, cacheConfig.REDIS_PING_INTERVAL * 1000);
ioredisClient.on('close', clearPingInterval);
ioredisClient.on('end', clearPingInterval);
}
}

/** @type {import('@keyv/redis').RedisClient | import('@keyv/redis').RedisCluster | null} */
let keyvRedisClient = null;
if (cacheConfig.USE_REDIS) {
// ** WARNING ** Keyv Redis client does not support Prefix like ioredis above.
// The prefix feature will be handled by the Keyv-Redis store in cacheFactory.js
/**
 * ** WARNING ** Keyv Redis client does not support Prefix like ioredis above.
 * The prefix feature will be handled by the Keyv-Redis store in cacheFactory.js
 */
const redisOptions = { username, password, socket: { tls: ca != null, ca } };

keyvRedisClient =
@@ -48,10 +69,28 @@ if (cacheConfig.USE_REDIS) {

keyvRedisClient.setMaxListeners(cacheConfig.REDIS_MAX_LISTENERS);

// Pinging the Redis server every 5 minutes to keep the connection alive
const keyvPingInterval = setInterval(() => keyvRedisClient.ping(), 5 * 60 * 1000);
keyvRedisClient.on('disconnect', () => clearInterval(keyvPingInterval));
keyvRedisClient.on('end', () => clearInterval(keyvPingInterval));
keyvRedisClient.on('error', (err) => {
logger.error('@keyv/redis client error:', err);
});

/** Ping Interval to keep the Redis server connection alive (if enabled) */
let pingInterval = null;
const clearPingInterval = () => {
if (pingInterval) {
clearInterval(pingInterval);
pingInterval = null;
}
};

if (cacheConfig.REDIS_PING_INTERVAL > 0) {
pingInterval = setInterval(() => {
if (keyvRedisClient && keyvRedisClient.isReady) {
keyvRedisClient.ping();
}
}, cacheConfig.REDIS_PING_INTERVAL * 1000);
keyvRedisClient.on('disconnect', clearPingInterval);
keyvRedisClient.on('end', clearPingInterval);
}
}

module.exports = { ioredisClient, keyvRedisClient, GLOBAL_PREFIX_SEPARATOR };
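The same keep-alive pattern now appears twice (once for the ioredis client, once for the @keyv/redis client): schedule pings only when REDIS_PING_INTERVAL > 0, skip the ping unless the client is ready, and clear the timer when the connection closes. A small shared helper could express it once; this is a sketch only, under the assumption that both clients expose `ping()` and `on()` as used above — the diff keeps the two inline copies:

// Hypothetical helper — not part of this change set
function keepAlive(client, { intervalSec, isReady, closeEvents }) {
  if (!intervalSec || intervalSec <= 0) {
    return;
  }
  const timer = setInterval(() => {
    if (isReady(client)) {
      client.ping();
    }
  }, intervalSec * 1000);
  const stop = () => clearInterval(timer);
  for (const event of closeEvents) {
    client.on(event, stop);
  }
}

// Usage mirroring the two blocks above:
// keepAlive(ioredisClient, { intervalSec: cacheConfig.REDIS_PING_INTERVAL, isReady: (c) => c.status === 'ready', closeEvents: ['close', 'end'] });
// keepAlive(keyvRedisClient, { intervalSec: cacheConfig.REDIS_PING_INTERVAL, isReady: (c) => c.isReady, closeEvents: ['disconnect', 'end'] });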
@@ -21,7 +21,7 @@ const findFileById = async (file_id, options = {}) => {
 * @param {string} agentId - The agent ID that might grant access
 * @returns {Promise<Map<string, boolean>>} Map of fileId to access status
 */
const hasAccessToFilesViaAgent = async (userId, fileIds, agentId) => {
const hasAccessToFilesViaAgent = async (userId, fileIds, agentId, checkCollaborative = true) => {
const accessMap = new Map();

// Initialize all files as no access
@@ -55,11 +55,11 @@ const hasAccessToFilesViaAgent = async (userId, fileIds, agentId) => {
}

// Agent is globally shared - check if it's collaborative
if (!agent.isCollaborative) {
if (checkCollaborative && !agent.isCollaborative) {
return accessMap;
}

// Agent is globally shared and collaborative - check which files are actually attached
// Check which files are actually attached
const attachedFileIds = new Set();
if (agent.tool_resources) {
for (const [_resourceType, resource] of Object.entries(agent.tool_resources)) {
@@ -118,7 +118,12 @@ const getFiles = async (filter, _sortOptions, selectFields = { text: 0 }, option

// Batch check access for all non-owned files
const fileIds = filesToCheck.map((f) => f.file_id);
const accessMap = await hasAccessToFilesViaAgent(options.userId, fileIds, options.agentId);
const accessMap = await hasAccessToFilesViaAgent(
options.userId,
fileIds,
options.agentId,
false,
);

// Filter files based on access
const accessibleFiles = filesToCheck.filter((file) => accessMap.get(file.file_id));
@@ -1,6 +1,6 @@
{
"name": "@librechat/backend",
"version": "v0.7.9-rc1",
"version": "v0.7.9",
"description": "",
"scripts": {
"start": "echo 'please run this from the root directory'",
@@ -44,19 +44,21 @@
"@googleapis/youtube": "^20.0.0",
"@keyv/redis": "^4.3.3",
"@langchain/community": "^0.3.47",
"@langchain/core": "^0.3.60",
"@langchain/core": "^0.3.62",
"@langchain/google-genai": "^0.2.13",
"@langchain/google-vertexai": "^0.2.13",
"@langchain/openai": "^0.5.18",
"@langchain/textsplitters": "^0.1.0",
"@librechat/agents": "^2.4.60",
"@librechat/agents": "^2.4.68",
"@librechat/api": "*",
"@librechat/data-schemas": "*",
"@node-saml/passport-saml": "^5.0.0",
"@modelcontextprotocol/sdk": "^1.17.0",
"@node-saml/passport-saml": "^5.1.0",
"@waylaidwanderer/fetch-event-source": "^3.0.1",
"axios": "^1.8.2",
"bcryptjs": "^2.4.3",
"compression": "^1.7.4",
"connect-redis": "^7.1.0",
"compression": "^1.8.1",
"connect-redis": "^8.1.0",
"cookie": "^0.7.2",
"cookie-parser": "^1.4.7",
"cors": "^2.8.5",
@@ -66,10 +68,11 @@
"express": "^4.21.2",
"express-mongo-sanitize": "^2.2.0",
"express-rate-limit": "^7.4.1",
"express-session": "^1.18.1",
"express-session": "^1.18.2",
"express-static-gzip": "^2.2.0",
"file-type": "^18.7.0",
"firebase": "^11.0.2",
"form-data": "^4.0.4",
"googleapis": "^126.0.1",
"handlebars": "^4.7.7",
"https-proxy-agent": "^7.0.6",
@@ -87,12 +90,12 @@
"mime": "^3.0.0",
"module-alias": "^2.2.3",
"mongoose": "^8.12.1",
"multer": "^2.0.1",
"multer": "^2.0.2",
"nanoid": "^3.3.7",
"node-fetch": "^2.7.0",
"nodemailer": "^6.9.15",
"ollama": "^0.5.0",
"openai": "^4.96.2",
"openai": "^5.10.1",
"openai-chat-tokens": "^0.2.8",
"openid-client": "^6.5.0",
"passport": "^0.6.0",
@@ -117,7 +120,7 @@
},
"devDependencies": {
"jest": "^29.7.0",
"mongodb-memory-server": "^10.1.3",
"mongodb-memory-server": "^10.1.4",
"nodemon": "^3.0.3",
"supertest": "^7.1.0"
}
@@ -97,7 +97,7 @@ function createServerToolsCallback() {
return;
}
await mcpToolsCache.set(serverName, serverTools);
logger.warn(`MCP tools for ${serverName} added to cache.`);
logger.debug(`MCP tools for ${serverName} added to cache.`);
} catch (error) {
logger.error('Error retrieving MCP tools from cache:', error);
}
@@ -143,7 +143,7 @@ const getAvailableTools = async (req, res) => {
const cache = getLogStores(CacheKeys.CONFIG_STORE);
const cachedToolsArray = await cache.get(CacheKeys.TOOLS);
const cachedUserTools = await getCachedTools({ userId });
const userPlugins = await convertMCPToolsToPlugins(cachedUserTools, customConfig, userId);
const userPlugins = convertMCPToolsToPlugins(cachedUserTools, customConfig);

if (cachedToolsArray && userPlugins) {
const dedupedTools = filterUniquePlugins([...userPlugins, ...cachedToolsArray]);
@@ -202,102 +202,23 @@ const getAvailableTools = async (req, res) => {
const serverName = parts[parts.length - 1];
const serverConfig = customConfig?.mcpServers?.[serverName];

logger.warn(
`[getAvailableTools] Processing MCP tool:`,
JSON.stringify({
pluginKey: plugin.pluginKey,
serverName,
hasServerConfig: !!serverConfig,
hasCustomUserVars: !!serverConfig?.customUserVars,
}),
);

if (!serverConfig) {
logger.warn(
`[getAvailableTools] No server config found for ${serverName}, skipping auth check`,
);
if (!serverConfig?.customUserVars) {
toolsOutput.push(toolToAdd);
continue;
}

// Handle MCP servers with customUserVars (user-level auth required)
if (serverConfig.customUserVars) {
logger.warn(`[getAvailableTools] Processing user-level MCP server: ${serverName}`);
const customVarKeys = Object.keys(serverConfig.customUserVars);
const customVarKeys = Object.keys(serverConfig.customUserVars);

// Build authConfig for MCP tools
if (customVarKeys.length === 0) {
toolToAdd.authConfig = [];
toolToAdd.authenticated = true;
} else {
toolToAdd.authConfig = Object.entries(serverConfig.customUserVars).map(([key, value]) => ({
authField: key,
label: value.title || key,
description: value.description || '',
}));

// Check actual connection status for MCP tools with auth requirements
if (userId) {
try {
const mcpManager = getMCPManager(userId);
const connectionStatus = await mcpManager.getUserConnectionStatus(userId, serverName);
toolToAdd.authenticated = connectionStatus.connected;
logger.warn(`[getAvailableTools] User-level connection status for ${serverName}:`, {
connected: connectionStatus.connected,
hasConnection: connectionStatus.hasConnection,
});
} catch (error) {
logger.error(
`[getAvailableTools] Error checking connection status for ${serverName}:`,
error,
);
toolToAdd.authenticated = false;
}
} else {
// For non-authenticated requests, default to false
toolToAdd.authenticated = false;
}
} else {
// Handle app-level MCP servers (no auth required)
logger.warn(`[getAvailableTools] Processing app-level MCP server: ${serverName}`);
toolToAdd.authConfig = [];

// Check if the app-level connection is active
try {
const mcpManager = getMCPManager();
const allConnections = mcpManager.getAllConnections();
logger.warn(`[getAvailableTools] All app-level connections:`, {
connectionNames: Array.from(allConnections.keys()),
serverName,
});

const appConnection = mcpManager.getConnection(serverName);
logger.warn(`[getAvailableTools] Checking app-level connection for ${serverName}:`, {
hasConnection: !!appConnection,
connectionState: appConnection?.getConnectionState?.(),
});

if (appConnection) {
const connectionState = appConnection.getConnectionState();
logger.warn(`[getAvailableTools] App-level connection status for ${serverName}:`, {
connectionState,
hasConnection: !!appConnection,
});

// For app-level connections, consider them authenticated if they're in 'connected' state
// This is more reliable than isConnected() which does network calls
toolToAdd.authenticated = connectionState === 'connected';
logger.warn(`[getAvailableTools] Final authenticated status for ${serverName}:`, {
authenticated: toolToAdd.authenticated,
connectionState,
});
} else {
logger.warn(`[getAvailableTools] No app-level connection found for ${serverName}`);
toolToAdd.authenticated = false;
}
} catch (error) {
logger.error(
`[getAvailableTools] Error checking app-level connection status for ${serverName}:`,
error,
);
toolToAdd.authenticated = false;
}
toolToAdd.authenticated = false;
}

toolsOutput.push(toolToAdd);
@@ -320,7 +241,7 @@ const getAvailableTools = async (req, res) => {
 * @param {Object} customConfig - Custom configuration for MCP servers
 * @returns {Array} Array of plugin objects
 */
async function convertMCPToolsToPlugins(functionTools, customConfig, userId = null) {
function convertMCPToolsToPlugins(functionTools, customConfig) {
const plugins = [];

for (const [toolKey, toolData] of Object.entries(functionTools)) {
@@ -332,19 +253,19 @@ async function convertMCPToolsToPlugins(functionTools, customConfig, userId = nu
const parts = toolKey.split(Constants.mcp_delimiter);
const serverName = parts[parts.length - 1];

const serverConfig = customConfig?.mcpServers?.[serverName];

const plugin = {
name: parts[0], // Use the tool name without server suffix
pluginKey: toolKey,
description: functionData.description || '',
authenticated: false, // Default to false, will be updated based on connection status
icon: undefined,
authenticated: true,
icon: serverConfig?.iconPath,
};

// Build authConfig for MCP tools
const serverConfig = customConfig?.mcpServers?.[serverName];
if (!serverConfig?.customUserVars) {
plugin.authConfig = [];
plugin.authenticated = true; // No auth required
plugins.push(plugin);
continue;
}
@@ -352,30 +273,12 @@
const customVarKeys = Object.keys(serverConfig.customUserVars);
if (customVarKeys.length === 0) {
plugin.authConfig = [];
plugin.authenticated = true; // No auth required
} else {
plugin.authConfig = Object.entries(serverConfig.customUserVars).map(([key, value]) => ({
authField: key,
label: value.title || key,
description: value.description || '',
}));

// Check actual connection status for MCP tools with auth requirements
if (userId) {
try {
const mcpManager = getMCPManager(userId);
const connectionStatus = await mcpManager.getUserConnectionStatus(userId, serverName);
plugin.authenticated = connectionStatus.connected;
} catch (error) {
logger.error(
`[convertMCPToolsToPlugins] Error checking connection status for ${serverName}:`,
error,
);
plugin.authenticated = false;
}
} else {
plugin.authenticated = false;
}
}

plugins.push(plugin);
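After this change, convertMCPToolsToPlugins is synchronous: it no longer probes per-user connection status, it marks tools authenticated by default, and it surfaces the server's iconPath. For a server configured with customUserVars, the resulting plugin entry looks roughly like this (all names and values below are illustrative, not taken from this diff):

// Given a hypothetical MCP server config such as:
// mcpServers: { github: { iconPath: '/assets/github.png', customUserVars: { GITHUB_TOKEN: { title: 'GitHub Token', description: 'PAT with repo scope' } } } }
// a tool key like `search_issues${Constants.mcp_delimiter}github` would convert to roughly:
const plugin = {
  name: 'search_issues',
  pluginKey: 'search_issues_mcp_github', // the actual separator comes from Constants.mcp_delimiter
  description: 'Search GitHub issues',
  authenticated: true,
  icon: '/assets/github.png',
  authConfig: [
    {
      authField: 'GITHUB_TOKEN',
      label: 'GitHub Token',
      description: 'PAT with repo scope',
    },
  ],
};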
89 changes: api/server/controllers/PluginController.spec.js (new file)
@@ -0,0 +1,89 @@
const { Constants } = require('librechat-data-provider');
const { getCustomConfig, getCachedTools } = require('~/server/services/Config');
const { getLogStores } = require('~/cache');

// Mock the dependencies
jest.mock('@librechat/data-schemas', () => ({
logger: {
debug: jest.fn(),
error: jest.fn(),
},
}));

jest.mock('~/server/services/Config', () => ({
getCustomConfig: jest.fn(),
getCachedTools: jest.fn(),
}));

jest.mock('~/server/services/ToolService', () => ({
getToolkitKey: jest.fn(),
}));

jest.mock('~/config', () => ({
getMCPManager: jest.fn(() => ({
loadManifestTools: jest.fn().mockResolvedValue([]),
})),
getFlowStateManager: jest.fn(),
}));

jest.mock('~/app/clients/tools', () => ({
availableTools: [],
}));

jest.mock('~/cache', () => ({
getLogStores: jest.fn(),
}));

// Import the actual module with the function we want to test
const { getAvailableTools } = require('./PluginController');

describe('PluginController', () => {
describe('plugin.icon behavior', () => {
let mockReq, mockRes, mockCache;

const callGetAvailableToolsWithMCPServer = async (mcpServers) => {
mockCache.get.mockResolvedValue(null);
getCustomConfig.mockResolvedValue({ mcpServers });

const functionTools = {
[`test-tool${Constants.mcp_delimiter}test-server`]: {
function: { name: 'test-tool', description: 'A test tool' },
},
};
getCachedTools.mockResolvedValueOnce(functionTools);
getCachedTools.mockResolvedValueOnce({
[`test-tool${Constants.mcp_delimiter}test-server`]: true,
});

await getAvailableTools(mockReq, mockRes);
const responseData = mockRes.json.mock.calls[0][0];
return responseData.find((tool) => tool.name === 'test-tool');
};

beforeEach(() => {
jest.clearAllMocks();
mockReq = { user: { id: 'test-user-id' } };
mockRes = { status: jest.fn().mockReturnThis(), json: jest.fn() };
mockCache = { get: jest.fn(), set: jest.fn() };
getLogStores.mockReturnValue(mockCache);
});

it('should set plugin.icon when iconPath is defined', async () => {
const mcpServers = {
'test-server': {
iconPath: '/path/to/icon.png',
},
};
const testTool = await callGetAvailableToolsWithMCPServer(mcpServers);
expect(testTool.icon).toBe('/path/to/icon.png');
});

it('should set plugin.icon to undefined when iconPath is not defined', async () => {
const mcpServers = {
'test-server': {},
};
const testTool = await callGetAvailableToolsWithMCPServer(mcpServers);
expect(testTool.icon).toBeUndefined();
});
});
});
@@ -1,11 +1,5 @@
const {
Tools,
Constants,
FileSources,
webSearchKeys,
extractWebSearchEnvVars,
} = require('librechat-data-provider');
const { logger } = require('@librechat/data-schemas');
const { webSearchKeys, extractWebSearchEnvVars } = require('@librechat/api');
const {
getFiles,
updateUser,
@@ -20,6 +14,7 @@ const { updateUserPluginAuth, deleteUserPluginAuth } = require('~/server/service
const { updateUserPluginsService, deleteUserKey } = require('~/server/services/UserService');
const { verifyEmail, resendVerificationEmail } = require('~/server/services/AuthService');
const { needsRefresh, getNewS3URL } = require('~/server/services/Files/S3/crud');
const { Tools, Constants, FileSources } = require('librechat-data-provider');
const { processDeleteRequest } = require('~/server/services/Files/process');
const { Transaction, Balance, User } = require('~/db/models');
const { deleteToolCalls } = require('~/models/ToolCall');
@@ -180,14 +175,12 @@ const updateUserPluginsController = async (req, res) => {
try {
const mcpManager = getMCPManager(user.id);
if (mcpManager) {
// Extract server name from pluginKey (e.g., "mcp_myserver" -> "myserver")
// Extract server name from pluginKey (format: "mcp_<serverName>")
const serverName = pluginKey.replace(Constants.mcp_prefix, '');

logger.info(
`[updateUserPluginsController] Disconnecting MCP connection for user ${user.id} and server ${serverName} after plugin auth update for ${pluginKey}.`,
`[updateUserPluginsController] Disconnecting MCP server ${serverName} for user ${user.id} after plugin auth update for ${pluginKey}.`,
);
// COMMENTED OUT: Don't kill the server connection on revoke
// await mcpManager.disconnectUserConnection(user.id, serverName);
await mcpManager.disconnectUserConnection(user.id, serverName);
}
} catch (disconnectError) {
logger.error(
@@ -8,15 +8,16 @@ const {
|
||||
Tokenizer,
|
||||
checkAccess,
|
||||
memoryInstructions,
|
||||
formatContentStrings,
|
||||
createMemoryProcessor,
|
||||
} = require('@librechat/api');
|
||||
const {
|
||||
Callback,
|
||||
Providers,
|
||||
GraphEvents,
|
||||
TitleMethod,
|
||||
formatMessage,
|
||||
formatAgentMessages,
|
||||
formatContentStrings,
|
||||
getTokenCountForMessage,
|
||||
createMetadataAggregator,
|
||||
} = require('@librechat/agents');
|
||||
@@ -26,7 +27,6 @@ const {
|
||||
VisionModes,
|
||||
ContentTypes,
|
||||
EModelEndpoint,
|
||||
KnownEndpoints,
|
||||
PermissionTypes,
|
||||
isAgentsEndpoint,
|
||||
AgentCapabilities,
|
||||
@@ -71,13 +71,15 @@ const payloadParser = ({ req, agent, endpoint }) => {
|
||||
if (isAgentsEndpoint(endpoint)) {
|
||||
return { model: undefined };
|
||||
} else if (endpoint === EModelEndpoint.bedrock) {
|
||||
return bedrockInputSchema.parse(agent.model_parameters);
|
||||
const parsedValues = bedrockInputSchema.parse(agent.model_parameters);
|
||||
if (parsedValues.thinking == null) {
|
||||
parsedValues.thinking = false;
|
||||
}
|
||||
return parsedValues;
|
||||
}
|
||||
return req.body.endpointOption.model_parameters;
|
||||
};
|
||||
|
||||
const legacyContentEndpoints = new Set([KnownEndpoints.groq, KnownEndpoints.deepseek]);
|
||||
|
||||
const noSystemModelRegex = [/\b(o1-preview|o1-mini|amazon\.titan-text)\b/gi];
|
||||
|
||||
function createTokenCounter(encoding) {
|
||||
@@ -718,9 +720,6 @@ class AgentClient extends BaseClient {
|
||||
this.indexTokenCountMap,
|
||||
toolSet,
|
||||
);
|
||||
if (legacyContentEndpoints.has(this.options.agent.endpoint?.toLowerCase())) {
|
||||
initialMessages = formatContentStrings(initialMessages);
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
@@ -736,6 +735,9 @@ class AgentClient extends BaseClient {
|
||||
if (i > 0) {
|
||||
this.model = agent.model_parameters.model;
|
||||
}
|
||||
if (i > 0 && config.signal == null) {
|
||||
config.signal = abortController.signal;
|
||||
}
|
||||
if (agent.recursion_limit && typeof agent.recursion_limit === 'number') {
|
||||
config.recursionLimit = agent.recursion_limit;
|
||||
}
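On retries (i > 0) the run now re-reads the agent's model and, when the per-step config has no signal yet, reattaches the caller's AbortController so later steps stay cancellable. A tiny illustrative helper, assuming the same step-index and config conventions:

// Sketch only: keeps later steps of a multi-step run bound to the caller's abort signal.
function ensureAbortSignal(config, abortController, stepIndex) {
  if (stepIndex > 0 && config.signal == null) {
    return { ...config, signal: abortController.signal };
  }
  return config;
}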
|
||||
@@ -784,6 +786,9 @@ class AgentClient extends BaseClient {
|
||||
}
|
||||
|
||||
let messages = _messages;
|
||||
if (agent.useLegacyContent === true) {
|
||||
messages = formatContentStrings(messages);
|
||||
}
|
||||
if (
|
||||
agent.model_parameters?.clientOptions?.defaultHeaders?.['anthropic-beta']?.includes(
|
||||
'prompt-caching',
|
||||
@@ -1012,25 +1017,40 @@ class AgentClient extends BaseClient {
|
||||
}
|
||||
const { handleLLMEnd, collected: collectedMetadata } = createMetadataAggregator();
|
||||
const { req, res, agent } = this.options;
|
||||
const endpoint = agent.endpoint;
|
||||
let endpoint = agent.endpoint;
|
||||
|
||||
/** @type {import('@librechat/agents').ClientOptions} */
|
||||
let clientOptions = {
|
||||
maxTokens: 75,
|
||||
model: agent.model_parameters.model,
|
||||
model: agent.model || agent.model_parameters.model,
|
||||
};
|
||||
|
||||
const { getOptions, overrideProvider, customEndpointConfig } =
|
||||
await getProviderConfig(endpoint);
|
||||
let titleProviderConfig = await getProviderConfig(endpoint);
|
||||
|
||||
/** @type {TEndpoint | undefined} */
|
||||
const endpointConfig = req.app.locals[endpoint] ?? customEndpointConfig;
|
||||
const endpointConfig =
|
||||
req.app.locals.all ?? req.app.locals[endpoint] ?? titleProviderConfig.customEndpointConfig;
|
||||
if (!endpointConfig) {
|
||||
logger.warn(
|
||||
'[api/server/controllers/agents/client.js #titleConvo] Error getting endpoint config',
|
||||
);
|
||||
}
|
||||
|
||||
if (endpointConfig?.titleEndpoint && endpointConfig.titleEndpoint !== endpoint) {
|
||||
try {
|
||||
titleProviderConfig = await getProviderConfig(endpointConfig.titleEndpoint);
|
||||
endpoint = endpointConfig.titleEndpoint;
|
||||
} catch (error) {
|
||||
logger.warn(
|
||||
`[api/server/controllers/agents/client.js #titleConvo] Error getting title endpoint config for ${endpointConfig.titleEndpoint}, falling back to default`,
|
||||
error,
|
||||
);
|
||||
// Fall back to original provider config
|
||||
endpoint = agent.endpoint;
|
||||
titleProviderConfig = await getProviderConfig(endpoint);
|
||||
}
|
||||
}
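The block above introduces `titleEndpoint`: title generation can be delegated to a different provider, and if resolving that provider's config throws, the code falls back to the agent's own endpoint. A hedged sketch of the same try/fallback shape, where `resolveProviderConfig` is a stand-in for the real `getProviderConfig`:

// Illustrative only; resolveProviderConfig and logger are assumed to be supplied by the caller.
async function pickTitleProvider({ resolveProviderConfig, agentEndpoint, endpointConfig, logger }) {
  let endpoint = agentEndpoint;
  let titleProviderConfig = await resolveProviderConfig(endpoint);

  if (endpointConfig?.titleEndpoint && endpointConfig.titleEndpoint !== endpoint) {
    try {
      titleProviderConfig = await resolveProviderConfig(endpointConfig.titleEndpoint);
      endpoint = endpointConfig.titleEndpoint;
    } catch (error) {
      logger.warn('Falling back to the agent endpoint for title generation', error);
      endpoint = agentEndpoint;
      titleProviderConfig = await resolveProviderConfig(endpoint);
    }
  }

  return { endpoint, titleProviderConfig };
}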
|
||||
|
||||
if (
|
||||
endpointConfig &&
|
||||
endpointConfig.titleModel &&
|
||||
@@ -1039,7 +1059,7 @@ class AgentClient extends BaseClient {
|
||||
clientOptions.model = endpointConfig.titleModel;
|
||||
}
|
||||
|
||||
const options = await getOptions({
|
||||
const options = await titleProviderConfig.getOptions({
|
||||
req,
|
||||
res,
|
||||
optionsOnly: true,
|
||||
@@ -1048,7 +1068,7 @@ class AgentClient extends BaseClient {
|
||||
endpointOption: { model_parameters: clientOptions },
|
||||
});
|
||||
|
||||
let provider = options.provider ?? overrideProvider ?? agent.provider;
|
||||
let provider = options.provider ?? titleProviderConfig.overrideProvider ?? agent.provider;
|
||||
if (
|
||||
endpoint === EModelEndpoint.azureOpenAI &&
|
||||
options.llmConfig?.azureOpenAIApiInstanceName == null
|
||||
@@ -1081,16 +1101,23 @@ class AgentClient extends BaseClient {
|
||||
),
|
||||
);
|
||||
|
||||
if (provider === Providers.GOOGLE) {
|
||||
if (
|
||||
provider === Providers.GOOGLE &&
|
||||
(endpointConfig?.titleMethod === TitleMethod.FUNCTIONS ||
|
||||
endpointConfig?.titleMethod === TitleMethod.STRUCTURED)
|
||||
) {
|
||||
clientOptions.json = true;
|
||||
}
|
||||
|
||||
try {
|
||||
const titleResult = await this.run.generateTitle({
|
||||
provider,
|
||||
clientOptions,
|
||||
inputText: text,
|
||||
contentParts: this.contentParts,
|
||||
clientOptions,
|
||||
titleMethod: endpointConfig?.titleMethod,
|
||||
titlePrompt: endpointConfig?.titlePrompt,
|
||||
titlePromptTemplate: endpointConfig?.titlePromptTemplate,
|
||||
chainOptions: {
|
||||
signal: abortController.signal,
|
||||
callbacks: [
|
||||
@@ -1138,8 +1165,52 @@ class AgentClient extends BaseClient {
|
||||
}
|
||||
}
|
||||
|
||||
/** Silent method, as `recordCollectedUsage` is used instead */
|
||||
async recordTokenUsage() {}
|
||||
/**
|
||||
* @param {object} params
|
||||
* @param {number} params.promptTokens
|
||||
* @param {number} params.completionTokens
|
||||
* @param {OpenAIUsageMetadata} [params.usage]
|
||||
* @param {string} [params.model]
|
||||
* @param {string} [params.context='message']
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async recordTokenUsage({ model, promptTokens, completionTokens, usage, context = 'message' }) {
|
||||
try {
|
||||
await spendTokens(
|
||||
{
|
||||
model,
|
||||
context,
|
||||
conversationId: this.conversationId,
|
||||
user: this.user ?? this.options.req.user?.id,
|
||||
endpointTokenConfig: this.options.endpointTokenConfig,
|
||||
},
|
||||
{ promptTokens, completionTokens },
|
||||
);
|
||||
|
||||
if (
|
||||
usage &&
|
||||
typeof usage === 'object' &&
|
||||
'reasoning_tokens' in usage &&
|
||||
typeof usage.reasoning_tokens === 'number'
|
||||
) {
|
||||
await spendTokens(
|
||||
{
|
||||
model,
|
||||
context: 'reasoning',
|
||||
conversationId: this.conversationId,
|
||||
user: this.user ?? this.options.req.user?.id,
|
||||
endpointTokenConfig: this.options.endpointTokenConfig,
|
||||
},
|
||||
{ completionTokens: usage.reasoning_tokens },
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
'[api/server/controllers/agents/client.js #recordTokenUsage] Error recording token usage',
|
||||
error,
|
||||
);
|
||||
}
|
||||
}
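The rewritten `recordTokenUsage` above spends prompt and completion tokens once, then records reasoning tokens as a separate 'reasoning' charge when the usage metadata exposes them. A small sketch of splitting such metadata into charges, assuming an OpenAI-style `usage` object with the optional top-level `reasoning_tokens` field checked above:

// Sketch: turn one usage object into the two charges the method above records.
function splitUsage(usage) {
  const charges = [
    {
      context: 'message',
      promptTokens: usage.prompt_tokens,
      completionTokens: usage.completion_tokens,
    },
  ];
  if (typeof usage.reasoning_tokens === 'number' && usage.reasoning_tokens > 0) {
    // Reasoning tokens are billed like completion tokens but tracked under their own context.
    charges.push({ context: 'reasoning', completionTokens: usage.reasoning_tokens });
  }
  return charges;
}

// splitUsage({ prompt_tokens: 120, completion_tokens: 450, reasoning_tokens: 300 })
// -> [{ context: 'message', ... }, { context: 'reasoning', completionTokens: 300 }]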
|
||||
|
||||
getEncoding() {
|
||||
return 'o200k_base';
|
||||
|
||||
730
api/server/controllers/agents/client.test.js
Normal file
@@ -0,0 +1,730 @@
|
||||
const { Providers } = require('@librechat/agents');
|
||||
const { Constants, EModelEndpoint } = require('librechat-data-provider');
|
||||
const AgentClient = require('./client');
|
||||
|
||||
jest.mock('@librechat/agents', () => ({
|
||||
...jest.requireActual('@librechat/agents'),
|
||||
createMetadataAggregator: () => ({
|
||||
handleLLMEnd: jest.fn(),
|
||||
collected: [],
|
||||
}),
|
||||
}));
|
||||
|
||||
describe('AgentClient - titleConvo', () => {
|
||||
let client;
|
||||
let mockRun;
|
||||
let mockReq;
|
||||
let mockRes;
|
||||
let mockAgent;
|
||||
let mockOptions;
|
||||
|
||||
beforeEach(() => {
|
||||
// Reset all mocks
|
||||
jest.clearAllMocks();
|
||||
|
||||
// Mock run object
|
||||
mockRun = {
|
||||
generateTitle: jest.fn().mockResolvedValue({
|
||||
title: 'Generated Title',
|
||||
}),
|
||||
};
|
||||
|
||||
// Mock agent - with both endpoint and provider
|
||||
mockAgent = {
|
||||
id: 'agent-123',
|
||||
endpoint: EModelEndpoint.openAI, // Use a valid provider as endpoint for getProviderConfig
|
||||
provider: EModelEndpoint.openAI, // Add provider property
|
||||
model_parameters: {
|
||||
model: 'gpt-4',
|
||||
},
|
||||
};
|
||||
|
||||
// Mock request and response
|
||||
mockReq = {
|
||||
app: {
|
||||
locals: {
|
||||
[EModelEndpoint.openAI]: {
|
||||
// Match the agent endpoint
|
||||
titleModel: 'gpt-3.5-turbo',
|
||||
titlePrompt: 'Custom title prompt',
|
||||
titleMethod: 'structured',
|
||||
titlePromptTemplate: 'Template: {{content}}',
|
||||
},
|
||||
},
|
||||
},
|
||||
user: {
|
||||
id: 'user-123',
|
||||
},
|
||||
body: {
|
||||
model: 'gpt-4',
|
||||
endpoint: EModelEndpoint.openAI,
|
||||
key: null,
|
||||
},
|
||||
};
|
||||
|
||||
mockRes = {};
|
||||
|
||||
// Mock options
|
||||
mockOptions = {
|
||||
req: mockReq,
|
||||
res: mockRes,
|
||||
agent: mockAgent,
|
||||
endpointTokenConfig: {},
|
||||
};
|
||||
|
||||
// Create client instance
|
||||
client = new AgentClient(mockOptions);
|
||||
client.run = mockRun;
|
||||
client.responseMessageId = 'response-123';
|
||||
client.conversationId = 'convo-123';
|
||||
client.contentParts = [{ type: 'text', text: 'Test content' }];
|
||||
client.recordCollectedUsage = jest.fn().mockResolvedValue(); // Mock as async function that resolves
|
||||
});
|
||||
|
||||
describe('titleConvo method', () => {
|
||||
it('should throw error if run is not initialized', async () => {
|
||||
client.run = null;
|
||||
|
||||
await expect(
|
||||
client.titleConvo({ text: 'Test', abortController: new AbortController() }),
|
||||
).rejects.toThrow('Run not initialized');
|
||||
});
|
||||
|
||||
it('should use titlePrompt from endpoint config', async () => {
|
||||
const text = 'Test conversation text';
|
||||
const abortController = new AbortController();
|
||||
|
||||
await client.titleConvo({ text, abortController });
|
||||
|
||||
expect(mockRun.generateTitle).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
titlePrompt: 'Custom title prompt',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should use titlePromptTemplate from endpoint config', async () => {
|
||||
const text = 'Test conversation text';
|
||||
const abortController = new AbortController();
|
||||
|
||||
await client.titleConvo({ text, abortController });
|
||||
|
||||
expect(mockRun.generateTitle).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
titlePromptTemplate: 'Template: {{content}}',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should use titleMethod from endpoint config', async () => {
|
||||
const text = 'Test conversation text';
|
||||
const abortController = new AbortController();
|
||||
|
||||
await client.titleConvo({ text, abortController });
|
||||
|
||||
expect(mockRun.generateTitle).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
provider: Providers.OPENAI,
|
||||
titleMethod: 'structured',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should use titleModel from endpoint config when provided', async () => {
|
||||
const text = 'Test conversation text';
|
||||
const abortController = new AbortController();
|
||||
|
||||
await client.titleConvo({ text, abortController });
|
||||
|
||||
// Check that generateTitle was called with correct clientOptions
|
||||
const generateTitleCall = mockRun.generateTitle.mock.calls[0][0];
|
||||
expect(generateTitleCall.clientOptions.model).toBe('gpt-3.5-turbo');
|
||||
});
|
||||
|
||||
it('should handle missing endpoint config gracefully', async () => {
|
||||
// Remove endpoint config
|
||||
mockReq.app.locals[EModelEndpoint.openAI] = undefined;
|
||||
|
||||
const text = 'Test conversation text';
|
||||
const abortController = new AbortController();
|
||||
|
||||
await client.titleConvo({ text, abortController });
|
||||
|
||||
expect(mockRun.generateTitle).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
titlePrompt: undefined,
|
||||
titlePromptTemplate: undefined,
|
||||
titleMethod: undefined,
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should use agent model when titleModel is not provided', async () => {
|
||||
// Remove titleModel from config
|
||||
delete mockReq.app.locals[EModelEndpoint.openAI].titleModel;
|
||||
|
||||
const text = 'Test conversation text';
|
||||
const abortController = new AbortController();
|
||||
|
||||
await client.titleConvo({ text, abortController });
|
||||
|
||||
const generateTitleCall = mockRun.generateTitle.mock.calls[0][0];
|
||||
expect(generateTitleCall.clientOptions.model).toBe('gpt-4'); // Should use agent's model
|
||||
});
|
||||
|
||||
it('should not use titleModel when it equals CURRENT_MODEL constant', async () => {
|
||||
mockReq.app.locals[EModelEndpoint.openAI].titleModel = Constants.CURRENT_MODEL;
|
||||
|
||||
const text = 'Test conversation text';
|
||||
const abortController = new AbortController();
|
||||
|
||||
await client.titleConvo({ text, abortController });
|
||||
|
||||
const generateTitleCall = mockRun.generateTitle.mock.calls[0][0];
|
||||
expect(generateTitleCall.clientOptions.model).toBe('gpt-4'); // Should use agent's model
|
||||
});
|
||||
|
||||
it('should pass all required parameters to generateTitle', async () => {
|
||||
const text = 'Test conversation text';
|
||||
const abortController = new AbortController();
|
||||
|
||||
await client.titleConvo({ text, abortController });
|
||||
|
||||
expect(mockRun.generateTitle).toHaveBeenCalledWith({
|
||||
provider: expect.any(String),
|
||||
inputText: text,
|
||||
contentParts: client.contentParts,
|
||||
clientOptions: expect.objectContaining({
|
||||
model: 'gpt-3.5-turbo',
|
||||
}),
|
||||
titlePrompt: 'Custom title prompt',
|
||||
titlePromptTemplate: 'Template: {{content}}',
|
||||
titleMethod: 'structured',
|
||||
chainOptions: expect.objectContaining({
|
||||
signal: abortController.signal,
|
||||
}),
|
||||
});
|
||||
});
|
||||
|
||||
it('should record collected usage after title generation', async () => {
|
||||
const text = 'Test conversation text';
|
||||
const abortController = new AbortController();
|
||||
|
||||
await client.titleConvo({ text, abortController });
|
||||
|
||||
expect(client.recordCollectedUsage).toHaveBeenCalledWith({
|
||||
model: 'gpt-3.5-turbo',
|
||||
context: 'title',
|
||||
collectedUsage: expect.any(Array),
|
||||
});
|
||||
});
|
||||
|
||||
it('should return the generated title', async () => {
|
||||
const text = 'Test conversation text';
|
||||
const abortController = new AbortController();
|
||||
|
||||
const result = await client.titleConvo({ text, abortController });
|
||||
|
||||
expect(result).toBe('Generated Title');
|
||||
});
|
||||
|
||||
it('should handle errors gracefully and return undefined', async () => {
|
||||
mockRun.generateTitle.mockRejectedValue(new Error('Title generation failed'));
|
||||
|
||||
const text = 'Test conversation text';
|
||||
const abortController = new AbortController();
|
||||
|
||||
const result = await client.titleConvo({ text, abortController });
|
||||
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should pass titleEndpoint configuration to generateTitle', async () => {
|
||||
// Mock the API key just for this test
|
||||
const originalApiKey = process.env.ANTHROPIC_API_KEY;
|
||||
process.env.ANTHROPIC_API_KEY = 'test-api-key';
|
||||
|
||||
// Add titleEndpoint to the config
|
||||
mockReq.app.locals[EModelEndpoint.openAI].titleEndpoint = EModelEndpoint.anthropic;
|
||||
mockReq.app.locals[EModelEndpoint.openAI].titleMethod = 'structured';
|
||||
mockReq.app.locals[EModelEndpoint.openAI].titlePrompt = 'Custom title prompt';
|
||||
mockReq.app.locals[EModelEndpoint.openAI].titlePromptTemplate = 'Custom template';
|
||||
|
||||
const text = 'Test conversation text';
|
||||
const abortController = new AbortController();
|
||||
|
||||
await client.titleConvo({ text, abortController });
|
||||
|
||||
// Verify generateTitle was called with the custom configuration
|
||||
expect(mockRun.generateTitle).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
titleMethod: 'structured',
|
||||
provider: Providers.ANTHROPIC,
|
||||
titlePrompt: 'Custom title prompt',
|
||||
titlePromptTemplate: 'Custom template',
|
||||
}),
|
||||
);
|
||||
|
||||
// Restore the original API key
|
||||
if (originalApiKey) {
|
||||
process.env.ANTHROPIC_API_KEY = originalApiKey;
|
||||
} else {
|
||||
delete process.env.ANTHROPIC_API_KEY;
|
||||
}
|
||||
});
|
||||
|
||||
it('should use all config when endpoint config is missing', async () => {
|
||||
// Remove endpoint-specific config
|
||||
delete mockReq.app.locals[EModelEndpoint.openAI].titleModel;
|
||||
delete mockReq.app.locals[EModelEndpoint.openAI].titlePrompt;
|
||||
delete mockReq.app.locals[EModelEndpoint.openAI].titleMethod;
|
||||
delete mockReq.app.locals[EModelEndpoint.openAI].titlePromptTemplate;
|
||||
|
||||
// Set 'all' config
|
||||
mockReq.app.locals.all = {
|
||||
titleModel: 'gpt-4o-mini',
|
||||
titlePrompt: 'All config title prompt',
|
||||
titleMethod: 'completion',
|
||||
titlePromptTemplate: 'All config template: {{content}}',
|
||||
};
|
||||
|
||||
const text = 'Test conversation text';
|
||||
const abortController = new AbortController();
|
||||
|
||||
await client.titleConvo({ text, abortController });
|
||||
|
||||
// Verify generateTitle was called with 'all' config values
|
||||
expect(mockRun.generateTitle).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
titleMethod: 'completion',
|
||||
titlePrompt: 'All config title prompt',
|
||||
titlePromptTemplate: 'All config template: {{content}}',
|
||||
}),
|
||||
);
|
||||
|
||||
// Check that the model was set from 'all' config
|
||||
const generateTitleCall = mockRun.generateTitle.mock.calls[0][0];
|
||||
expect(generateTitleCall.clientOptions.model).toBe('gpt-4o-mini');
|
||||
});
|
||||
|
||||
it('should prioritize all config over endpoint config for title settings', async () => {
|
||||
// Set both endpoint and 'all' config
|
||||
mockReq.app.locals[EModelEndpoint.openAI].titleModel = 'gpt-3.5-turbo';
|
||||
mockReq.app.locals[EModelEndpoint.openAI].titlePrompt = 'Endpoint title prompt';
|
||||
mockReq.app.locals[EModelEndpoint.openAI].titleMethod = 'structured';
|
||||
// Remove titlePromptTemplate from endpoint config to test fallback
|
||||
delete mockReq.app.locals[EModelEndpoint.openAI].titlePromptTemplate;
|
||||
|
||||
mockReq.app.locals.all = {
|
||||
titleModel: 'gpt-4o-mini',
|
||||
titlePrompt: 'All config title prompt',
|
||||
titleMethod: 'completion',
|
||||
titlePromptTemplate: 'All config template',
|
||||
};
|
||||
|
||||
const text = 'Test conversation text';
|
||||
const abortController = new AbortController();
|
||||
|
||||
await client.titleConvo({ text, abortController });
|
||||
|
||||
// Verify 'all' config takes precedence over endpoint config
|
||||
expect(mockRun.generateTitle).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
titleMethod: 'completion',
|
||||
titlePrompt: 'All config title prompt',
|
||||
titlePromptTemplate: 'All config template',
|
||||
}),
|
||||
);
|
||||
|
||||
// Check that the model was set from 'all' config
|
||||
const generateTitleCall = mockRun.generateTitle.mock.calls[0][0];
|
||||
expect(generateTitleCall.clientOptions.model).toBe('gpt-4o-mini');
|
||||
});
|
||||
|
||||
it('should use all config with titleEndpoint and verify provider switch', async () => {
|
||||
// Mock the API key for the titleEndpoint provider
|
||||
const originalApiKey = process.env.ANTHROPIC_API_KEY;
|
||||
process.env.ANTHROPIC_API_KEY = 'test-anthropic-key';
|
||||
|
||||
// Remove endpoint-specific config to test 'all' config
|
||||
delete mockReq.app.locals[EModelEndpoint.openAI];
|
||||
|
||||
// Set comprehensive 'all' config with all new title options
|
||||
mockReq.app.locals.all = {
|
||||
titleConvo: true,
|
||||
titleModel: 'claude-3-haiku-20240307',
|
||||
titleMethod: 'completion', // Testing the new default method
|
||||
titlePrompt: 'Generate a concise, descriptive title for this conversation',
|
||||
titlePromptTemplate: 'Conversation summary: {{content}}',
|
||||
titleEndpoint: EModelEndpoint.anthropic, // Should switch provider to Anthropic
|
||||
};
|
||||
|
||||
const text = 'Test conversation about AI and machine learning';
|
||||
const abortController = new AbortController();
|
||||
|
||||
await client.titleConvo({ text, abortController });
|
||||
|
||||
// Verify all config values were used
|
||||
expect(mockRun.generateTitle).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
provider: Providers.ANTHROPIC, // Critical: Verify provider switched to Anthropic
|
||||
titleMethod: 'completion',
|
||||
titlePrompt: 'Generate a concise, descriptive title for this conversation',
|
||||
titlePromptTemplate: 'Conversation summary: {{content}}',
|
||||
inputText: text,
|
||||
contentParts: client.contentParts,
|
||||
}),
|
||||
);
|
||||
|
||||
// Verify the model was set from 'all' config
|
||||
const generateTitleCall = mockRun.generateTitle.mock.calls[0][0];
|
||||
expect(generateTitleCall.clientOptions.model).toBe('claude-3-haiku-20240307');
|
||||
|
||||
// Verify other client options are set correctly
|
||||
expect(generateTitleCall.clientOptions).toMatchObject({
|
||||
model: 'claude-3-haiku-20240307',
|
||||
// Note: Anthropic's getOptions may set its own maxTokens value
|
||||
});
|
||||
|
||||
// Restore the original API key
|
||||
if (originalApiKey) {
|
||||
process.env.ANTHROPIC_API_KEY = originalApiKey;
|
||||
} else {
|
||||
delete process.env.ANTHROPIC_API_KEY;
|
||||
}
|
||||
});
|
||||
|
||||
it('should test all titleMethod options from all config', async () => {
|
||||
// Test each titleMethod: 'completion', 'functions', 'structured'
|
||||
const titleMethods = ['completion', 'functions', 'structured'];
|
||||
|
||||
for (const method of titleMethods) {
|
||||
// Clear previous calls
|
||||
mockRun.generateTitle.mockClear();
|
||||
|
||||
// Remove endpoint config
|
||||
delete mockReq.app.locals[EModelEndpoint.openAI];
|
||||
|
||||
// Set 'all' config with specific titleMethod
|
||||
mockReq.app.locals.all = {
|
||||
titleModel: 'gpt-4o-mini',
|
||||
titleMethod: method,
|
||||
titlePrompt: `Testing ${method} method`,
|
||||
titlePromptTemplate: `Template for ${method}: {{content}}`,
|
||||
};
|
||||
|
||||
const text = `Test conversation for ${method} method`;
|
||||
const abortController = new AbortController();
|
||||
|
||||
await client.titleConvo({ text, abortController });
|
||||
|
||||
// Verify the correct titleMethod was used
|
||||
expect(mockRun.generateTitle).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
titleMethod: method,
|
||||
titlePrompt: `Testing ${method} method`,
|
||||
titlePromptTemplate: `Template for ${method}: {{content}}`,
|
||||
}),
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
describe('Azure-specific title generation', () => {
|
||||
let originalEnv;
|
||||
|
||||
beforeEach(() => {
|
||||
// Reset mocks
|
||||
jest.clearAllMocks();
|
||||
|
||||
// Save original environment variables
|
||||
originalEnv = { ...process.env };
|
||||
|
||||
// Mock Azure API keys
|
||||
process.env.AZURE_OPENAI_API_KEY = 'test-azure-key';
|
||||
process.env.AZURE_API_KEY = 'test-azure-key';
|
||||
process.env.EASTUS_API_KEY = 'test-eastus-key';
|
||||
process.env.EASTUS2_API_KEY = 'test-eastus2-key';
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore environment variables
|
||||
process.env = originalEnv;
|
||||
});
|
||||
|
||||
it('should use OPENAI provider for Azure serverless endpoints', async () => {
|
||||
// Set up Azure endpoint with serverless config
|
||||
mockAgent.endpoint = EModelEndpoint.azureOpenAI;
|
||||
mockAgent.provider = EModelEndpoint.azureOpenAI;
|
||||
mockReq.app.locals[EModelEndpoint.azureOpenAI] = {
|
||||
titleConvo: true,
|
||||
titleModel: 'grok-3',
|
||||
titleMethod: 'completion',
|
||||
titlePrompt: 'Azure serverless title prompt',
|
||||
streamRate: 35,
|
||||
modelGroupMap: {
|
||||
'grok-3': {
|
||||
group: 'Azure AI Foundry',
|
||||
deploymentName: 'grok-3',
|
||||
},
|
||||
},
|
||||
groupMap: {
|
||||
'Azure AI Foundry': {
|
||||
apiKey: '${AZURE_API_KEY}',
|
||||
baseURL: 'https://test.services.ai.azure.com/models',
|
||||
version: '2024-05-01-preview',
|
||||
serverless: true,
|
||||
models: {
|
||||
'grok-3': {
|
||||
deploymentName: 'grok-3',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
mockReq.body.endpoint = EModelEndpoint.azureOpenAI;
|
||||
mockReq.body.model = 'grok-3';
|
||||
|
||||
const text = 'Test Azure serverless conversation';
|
||||
const abortController = new AbortController();
|
||||
|
||||
await client.titleConvo({ text, abortController });
|
||||
|
||||
// Verify provider was switched to OPENAI for serverless
|
||||
expect(mockRun.generateTitle).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
provider: Providers.OPENAI, // Should be OPENAI for serverless
|
||||
titleMethod: 'completion',
|
||||
titlePrompt: 'Azure serverless title prompt',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should use AZURE provider for Azure endpoints with instanceName', async () => {
|
||||
// Set up Azure endpoint
|
||||
mockAgent.endpoint = EModelEndpoint.azureOpenAI;
|
||||
mockAgent.provider = EModelEndpoint.azureOpenAI;
|
||||
mockReq.app.locals[EModelEndpoint.azureOpenAI] = {
|
||||
titleConvo: true,
|
||||
titleModel: 'gpt-4o',
|
||||
titleMethod: 'structured',
|
||||
titlePrompt: 'Azure instance title prompt',
|
||||
streamRate: 35,
|
||||
modelGroupMap: {
|
||||
'gpt-4o': {
|
||||
group: 'eastus',
|
||||
deploymentName: 'gpt-4o',
|
||||
},
|
||||
},
|
||||
groupMap: {
|
||||
eastus: {
|
||||
apiKey: '${EASTUS_API_KEY}',
|
||||
instanceName: 'region-instance',
|
||||
version: '2024-02-15-preview',
|
||||
models: {
|
||||
'gpt-4o': {
|
||||
deploymentName: 'gpt-4o',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
mockReq.body.endpoint = EModelEndpoint.azureOpenAI;
|
||||
mockReq.body.model = 'gpt-4o';
|
||||
|
||||
const text = 'Test Azure instance conversation';
|
||||
const abortController = new AbortController();
|
||||
|
||||
await client.titleConvo({ text, abortController });
|
||||
|
||||
// Verify provider remains AZURE with instanceName
|
||||
expect(mockRun.generateTitle).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
provider: Providers.AZURE,
|
||||
titleMethod: 'structured',
|
||||
titlePrompt: 'Azure instance title prompt',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle Azure titleModel with CURRENT_MODEL constant', async () => {
|
||||
// Set up Azure endpoint
|
||||
mockAgent.endpoint = EModelEndpoint.azureOpenAI;
|
||||
mockAgent.provider = EModelEndpoint.azureOpenAI;
|
||||
mockAgent.model_parameters.model = 'gpt-4o-latest';
|
||||
mockReq.app.locals[EModelEndpoint.azureOpenAI] = {
|
||||
titleConvo: true,
|
||||
titleModel: Constants.CURRENT_MODEL,
|
||||
titleMethod: 'functions',
|
||||
streamRate: 35,
|
||||
modelGroupMap: {
|
||||
'gpt-4o-latest': {
|
||||
group: 'region-eastus',
|
||||
deploymentName: 'gpt-4o-mini',
|
||||
version: '2024-02-15-preview',
|
||||
},
|
||||
},
|
||||
groupMap: {
|
||||
'region-eastus': {
|
||||
apiKey: '${EASTUS2_API_KEY}',
|
||||
instanceName: 'test-instance',
|
||||
version: '2024-12-01-preview',
|
||||
models: {
|
||||
'gpt-4o-latest': {
|
||||
deploymentName: 'gpt-4o-mini',
|
||||
version: '2024-02-15-preview',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
mockReq.body.endpoint = EModelEndpoint.azureOpenAI;
|
||||
mockReq.body.model = 'gpt-4o-latest';
|
||||
|
||||
const text = 'Test Azure current model';
|
||||
const abortController = new AbortController();
|
||||
|
||||
await client.titleConvo({ text, abortController });
|
||||
|
||||
// Verify it uses the correct model when titleModel is CURRENT_MODEL
|
||||
const generateTitleCall = mockRun.generateTitle.mock.calls[0][0];
|
||||
// When CURRENT_MODEL is used with Azure, the model gets mapped to the deployment name
|
||||
// In this case, 'gpt-4o-latest' is mapped to 'gpt-4o-mini' deployment
|
||||
expect(generateTitleCall.clientOptions.model).toBe('gpt-4o-mini');
|
||||
// Also verify that CURRENT_MODEL constant was not passed as the model
|
||||
expect(generateTitleCall.clientOptions.model).not.toBe(Constants.CURRENT_MODEL);
|
||||
});
|
||||
|
||||
it('should handle Azure with multiple model groups', async () => {
|
||||
// Set up Azure endpoint
|
||||
mockAgent.endpoint = EModelEndpoint.azureOpenAI;
|
||||
mockAgent.provider = EModelEndpoint.azureOpenAI;
|
||||
mockReq.app.locals[EModelEndpoint.azureOpenAI] = {
|
||||
titleConvo: true,
|
||||
titleModel: 'o1-mini',
|
||||
titleMethod: 'completion',
|
||||
streamRate: 35,
|
||||
modelGroupMap: {
|
||||
'gpt-4o': {
|
||||
group: 'eastus',
|
||||
deploymentName: 'gpt-4o',
|
||||
},
|
||||
'o1-mini': {
|
||||
group: 'region-eastus',
|
||||
deploymentName: 'o1-mini',
|
||||
},
|
||||
'codex-mini': {
|
||||
group: 'codex-mini',
|
||||
deploymentName: 'codex-mini',
|
||||
},
|
||||
},
|
||||
groupMap: {
|
||||
eastus: {
|
||||
apiKey: '${EASTUS_API_KEY}',
|
||||
instanceName: 'region-eastus',
|
||||
version: '2024-02-15-preview',
|
||||
models: {
|
||||
'gpt-4o': {
|
||||
deploymentName: 'gpt-4o',
|
||||
},
|
||||
},
|
||||
},
|
||||
'region-eastus': {
|
||||
apiKey: '${EASTUS2_API_KEY}',
|
||||
instanceName: 'region-eastus2',
|
||||
version: '2024-12-01-preview',
|
||||
models: {
|
||||
'o1-mini': {
|
||||
deploymentName: 'o1-mini',
|
||||
},
|
||||
},
|
||||
},
|
||||
'codex-mini': {
|
||||
apiKey: '${AZURE_API_KEY}',
|
||||
baseURL: 'https://example.cognitiveservices.azure.com/openai/',
|
||||
version: '2025-04-01-preview',
|
||||
serverless: true,
|
||||
models: {
|
||||
'codex-mini': {
|
||||
deploymentName: 'codex-mini',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
mockReq.body.endpoint = EModelEndpoint.azureOpenAI;
|
||||
mockReq.body.model = 'o1-mini';
|
||||
|
||||
const text = 'Test Azure multi-group conversation';
|
||||
const abortController = new AbortController();
|
||||
|
||||
await client.titleConvo({ text, abortController });
|
||||
|
||||
// Verify correct model and provider are used
|
||||
expect(mockRun.generateTitle).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
provider: Providers.AZURE,
|
||||
titleMethod: 'completion',
|
||||
}),
|
||||
);
|
||||
|
||||
const generateTitleCall = mockRun.generateTitle.mock.calls[0][0];
|
||||
expect(generateTitleCall.clientOptions.model).toBe('o1-mini');
|
||||
expect(generateTitleCall.clientOptions.maxTokens).toBeUndefined(); // o1 models shouldn't have maxTokens
|
||||
});
|
||||
|
||||
it('should use all config as fallback for Azure endpoints', async () => {
|
||||
// Set up Azure endpoint with minimal config
|
||||
mockAgent.endpoint = EModelEndpoint.azureOpenAI;
|
||||
mockAgent.provider = EModelEndpoint.azureOpenAI;
|
||||
mockReq.body.endpoint = EModelEndpoint.azureOpenAI;
|
||||
mockReq.body.model = 'gpt-4';
|
||||
|
||||
// Remove Azure-specific config
|
||||
delete mockReq.app.locals[EModelEndpoint.azureOpenAI];
|
||||
|
||||
// Set 'all' config as fallback with a serverless Azure config
|
||||
mockReq.app.locals.all = {
|
||||
titleConvo: true,
|
||||
titleModel: 'gpt-4',
|
||||
titleMethod: 'structured',
|
||||
titlePrompt: 'Fallback title prompt from all config',
|
||||
titlePromptTemplate: 'Template: {{content}}',
|
||||
modelGroupMap: {
|
||||
'gpt-4': {
|
||||
group: 'default-group',
|
||||
deploymentName: 'gpt-4',
|
||||
},
|
||||
},
|
||||
groupMap: {
|
||||
'default-group': {
|
||||
apiKey: '${AZURE_API_KEY}',
|
||||
baseURL: 'https://default.openai.azure.com/',
|
||||
version: '2024-02-15-preview',
|
||||
serverless: true,
|
||||
models: {
|
||||
'gpt-4': {
|
||||
deploymentName: 'gpt-4',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const text = 'Test Azure with all config fallback';
|
||||
const abortController = new AbortController();
|
||||
|
||||
await client.titleConvo({ text, abortController });
|
||||
|
||||
// Verify all config is used
|
||||
expect(mockRun.generateTitle).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
provider: Providers.OPENAI, // Should be OPENAI when no instanceName
|
||||
titleMethod: 'structured',
|
||||
titlePrompt: 'Fallback title prompt from all config',
|
||||
titlePromptTemplate: 'Template: {{content}}',
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,14 +1,13 @@
|
||||
const { nanoid } = require('nanoid');
|
||||
const { EnvVar } = require('@librechat/agents');
|
||||
const { checkAccess } = require('@librechat/api');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { checkAccess, loadWebSearchAuth } = require('@librechat/api');
|
||||
const {
|
||||
Tools,
|
||||
AuthType,
|
||||
Permissions,
|
||||
ToolCallTypes,
|
||||
PermissionTypes,
|
||||
loadWebSearchAuth,
|
||||
} = require('librechat-data-provider');
|
||||
const { processFileURL, uploadImageBuffer } = require('~/server/services/Files/process');
|
||||
const { processCodeOutput } = require('~/server/services/Files/Code/process');
|
||||
|
||||
@@ -106,7 +106,7 @@ router.get('/', async function (req, res) {
|
||||
const serverConfig = config.mcpServers[serverName];
|
||||
payload.mcpServers[serverName] = {
|
||||
customUserVars: serverConfig?.customUserVars || {},
|
||||
requiresOAuth: req.app.locals.mcpOAuthRequirements?.[serverName] || false,
|
||||
chatMenu: serverConfig?.chatMenu,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,7 +4,7 @@ const { MCPOAuthHandler } = require('@librechat/api');
|
||||
const { CacheKeys, Constants } = require('librechat-data-provider');
|
||||
const { findToken, updateToken, createToken, deleteTokens } = require('~/models');
|
||||
const { setCachedTools, getCachedTools, loadCustomConfig } = require('~/server/services/Config');
|
||||
const { getUserPluginAuthValueByPlugin } = require('~/server/services/PluginService');
|
||||
const { getUserPluginAuthValue } = require('~/server/services/PluginService');
|
||||
const { getMCPManager, getFlowStateManager } = require('~/config');
|
||||
const { requireJwtAuth } = require('~/server/middleware');
|
||||
const { getLogStores } = require('~/cache');
|
||||
@@ -120,9 +120,73 @@ router.get('/:serverName/oauth/callback', async (req, res) => {
|
||||
const tokens = await MCPOAuthHandler.completeOAuthFlow(flowId, code, flowManager);
|
||||
logger.info('[MCP OAuth] OAuth flow completed, tokens received in callback route');
|
||||
|
||||
// For system-level OAuth, we need to store the tokens and retry the connection
|
||||
if (flowState.userId === 'system') {
|
||||
logger.debug(`[MCP OAuth] System-level OAuth completed for ${serverName}`);
|
||||
// Try to establish the MCP connection with the new tokens
|
||||
try {
|
||||
const mcpManager = getMCPManager(flowState.userId);
|
||||
logger.debug(`[MCP OAuth] Attempting to reconnect ${serverName} with new OAuth tokens`);
|
||||
|
||||
// For user-level OAuth, try to establish the connection
|
||||
if (flowState.userId !== 'system') {
|
||||
// We need to get the user object - in this case we'll need to reconstruct it
|
||||
const user = { id: flowState.userId };
|
||||
|
||||
// Try to establish connection with the new tokens
|
||||
const userConnection = await mcpManager.getUserConnection({
|
||||
user,
|
||||
serverName,
|
||||
flowManager,
|
||||
tokenMethods: {
|
||||
findToken,
|
||||
updateToken,
|
||||
createToken,
|
||||
deleteTokens,
|
||||
},
|
||||
});
|
||||
|
||||
logger.info(
|
||||
`[MCP OAuth] Successfully reconnected ${serverName} for user ${flowState.userId}`,
|
||||
);
|
||||
|
||||
// Fetch and cache tools now that we have a successful connection
|
||||
const userTools = (await getCachedTools({ userId: flowState.userId })) || {};
|
||||
|
||||
// Remove any old tools from this server in the user's cache
|
||||
const mcpDelimiter = Constants.mcp_delimiter;
|
||||
for (const key of Object.keys(userTools)) {
|
||||
if (key.endsWith(`${mcpDelimiter}${serverName}`)) {
|
||||
delete userTools[key];
|
||||
}
|
||||
}
|
||||
|
||||
// Add the new tools from this server
|
||||
const tools = await userConnection.fetchTools();
|
||||
for (const tool of tools) {
|
||||
const name = `${tool.name}${Constants.mcp_delimiter}${serverName}`;
|
||||
userTools[name] = {
|
||||
type: 'function',
|
||||
['function']: {
|
||||
name,
|
||||
description: tool.description,
|
||||
parameters: tool.inputSchema,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Save the updated user tool cache
|
||||
await setCachedTools(userTools, { userId: flowState.userId });
|
||||
|
||||
logger.debug(
|
||||
`[MCP OAuth] Cached ${tools.length} tools for ${serverName} user ${flowState.userId}`,
|
||||
);
|
||||
} else {
|
||||
logger.debug(`[MCP OAuth] System-level OAuth completed for ${serverName}`);
|
||||
}
|
||||
} catch (error) {
|
||||
// Don't fail the OAuth callback if reconnection fails - the tokens are still saved
|
||||
logger.warn(
|
||||
`[MCP OAuth] Failed to reconnect ${serverName} after OAuth, but tokens are saved:`,
|
||||
error,
|
||||
);
|
||||
}
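After a successful OAuth reconnection, the callback above rebuilds the per-user tool cache: entries whose keys end with the server's delimiter are dropped, then freshly fetched tools are re-added in OpenAI function-tool format. The same refresh reappears in the reinitialize route below, so it can be read as one helper; a hedged sketch, with `getCache`, `setCache`, and `delimiter` standing in for `getCachedTools`, `setCachedTools`, and `Constants.mcp_delimiter`:

// Sketch only; the real code inlines this logic rather than calling a shared helper.
async function refreshServerTools({ userId, serverName, connection, delimiter, getCache, setCache }) {
  const userTools = (await getCache({ userId })) || {};

  // Drop stale entries that belong to this server.
  for (const key of Object.keys(userTools)) {
    if (key.endsWith(`${delimiter}${serverName}`)) {
      delete userTools[key];
    }
  }

  // Re-add the tools reported by the live connection, keyed as "<tool><delimiter><server>".
  const tools = await connection.fetchTools();
  for (const tool of tools) {
    const name = `${tool.name}${delimiter}${serverName}`;
    userTools[name] = {
      type: 'function',
      function: { name, description: tool.description, parameters: tool.inputSchema },
    };
  }

  await setCache(userTools, { userId });
  return tools.length;
}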
|
||||
|
||||
/** ID of the flow that the tool/connection is waiting for */
|
||||
@@ -205,9 +269,206 @@ router.get('/oauth/status/:flowId', async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Cancel OAuth flow
|
||||
* This endpoint cancels a pending OAuth flow
|
||||
*/
|
||||
router.post('/oauth/cancel/:serverName', requireJwtAuth, async (req, res) => {
|
||||
try {
|
||||
const { serverName } = req.params;
|
||||
const user = req.user;
|
||||
|
||||
if (!user?.id) {
|
||||
return res.status(401).json({ error: 'User not authenticated' });
|
||||
}
|
||||
|
||||
logger.info(`[MCP OAuth Cancel] Cancelling OAuth flow for ${serverName} by user ${user.id}`);
|
||||
|
||||
const flowsCache = getLogStores(CacheKeys.FLOWS);
|
||||
const flowManager = getFlowStateManager(flowsCache);
|
||||
|
||||
// Generate the flow ID for this user/server combination
|
||||
const flowId = MCPOAuthHandler.generateFlowId(user.id, serverName);
|
||||
|
||||
// Check if flow exists
|
||||
const flowState = await flowManager.getFlowState(flowId, 'mcp_oauth');
|
||||
|
||||
if (!flowState) {
|
||||
logger.debug(`[MCP OAuth Cancel] No active flow found for ${serverName}`);
|
||||
return res.json({
|
||||
success: true,
|
||||
message: 'No active OAuth flow to cancel',
|
||||
});
|
||||
}
|
||||
|
||||
// Cancel the flow by marking it as failed
|
||||
await flowManager.completeFlow(flowId, 'mcp_oauth', null, 'User cancelled OAuth flow');
|
||||
|
||||
logger.info(`[MCP OAuth Cancel] Successfully cancelled OAuth flow for ${serverName}`);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
message: `OAuth flow for ${serverName} cancelled successfully`,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('[MCP OAuth Cancel] Failed to cancel OAuth flow', error);
|
||||
res.status(500).json({ error: 'Failed to cancel OAuth flow' });
|
||||
}
|
||||
});
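From a client, cancelling a pending flow is a single authenticated POST against the route above. A hypothetical caller, assuming the router is mounted at `/api/mcp` and that JWT auth rides on a cookie:

// Hypothetical client-side call; adjust the base path and auth handling to your deployment.
async function cancelMcpOAuth(serverName) {
  const res = await fetch(`/api/mcp/oauth/cancel/${encodeURIComponent(serverName)}`, {
    method: 'POST',
    credentials: 'include',
  });
  if (!res.ok) {
    throw new Error(`Cancel failed with status ${res.status}`);
  }
  return res.json(); // e.g. { success: true, message: '...' }
}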
|
||||
|
||||
/**
|
||||
* Reinitialize MCP server
|
||||
* This endpoint allows reinitializing a specific MCP server
|
||||
*/
|
||||
router.post('/:serverName/reinitialize', requireJwtAuth, async (req, res) => {
|
||||
try {
|
||||
const { serverName } = req.params;
|
||||
const user = req.user;
|
||||
|
||||
if (!user?.id) {
|
||||
return res.status(401).json({ error: 'User not authenticated' });
|
||||
}
|
||||
|
||||
logger.info(`[MCP Reinitialize] Reinitializing server: ${serverName}`);
|
||||
|
||||
const printConfig = false;
|
||||
const config = await loadCustomConfig(printConfig);
|
||||
if (!config || !config.mcpServers || !config.mcpServers[serverName]) {
|
||||
return res.status(404).json({
|
||||
error: `MCP server '${serverName}' not found in configuration`,
|
||||
});
|
||||
}
|
||||
|
||||
const flowsCache = getLogStores(CacheKeys.FLOWS);
|
||||
const flowManager = getFlowStateManager(flowsCache);
|
||||
const mcpManager = getMCPManager();
|
||||
|
||||
await mcpManager.disconnectServer(serverName);
|
||||
logger.info(`[MCP Reinitialize] Disconnected existing server: ${serverName}`);
|
||||
|
||||
const serverConfig = config.mcpServers[serverName];
|
||||
mcpManager.mcpConfigs[serverName] = serverConfig;
|
||||
let customUserVars = {};
|
||||
if (serverConfig.customUserVars && typeof serverConfig.customUserVars === 'object') {
|
||||
for (const varName of Object.keys(serverConfig.customUserVars)) {
|
||||
try {
|
||||
const value = await getUserPluginAuthValue(user.id, varName, false);
|
||||
if (value) {
|
||||
customUserVars[varName] = value;
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(`[MCP Reinitialize] Error fetching ${varName} for user ${user.id}:`, err);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let userConnection = null;
|
||||
let oauthRequired = false;
|
||||
let oauthUrl = null;
|
||||
|
||||
try {
|
||||
userConnection = await mcpManager.getUserConnection({
|
||||
user,
|
||||
serverName,
|
||||
flowManager,
|
||||
customUserVars,
|
||||
tokenMethods: {
|
||||
findToken,
|
||||
updateToken,
|
||||
createToken,
|
||||
deleteTokens,
|
||||
},
|
||||
returnOnOAuth: true, // Return immediately when OAuth is initiated
|
||||
// Add OAuth handlers to capture the OAuth URL when needed
|
||||
oauthStart: async (authURL) => {
|
||||
logger.info(`[MCP Reinitialize] OAuth URL received: ${authURL}`);
|
||||
oauthUrl = authURL;
|
||||
oauthRequired = true;
|
||||
},
|
||||
});
|
||||
|
||||
logger.info(`[MCP Reinitialize] Successfully established connection for ${serverName}`);
|
||||
} catch (err) {
|
||||
logger.info(`[MCP Reinitialize] getUserConnection threw error: ${err.message}`);
|
||||
logger.info(
|
||||
`[MCP Reinitialize] OAuth state - oauthRequired: ${oauthRequired}, oauthUrl: ${oauthUrl ? 'present' : 'null'}`,
|
||||
);
|
||||
|
||||
// Check if this is an OAuth error - if so, the flow state should be set up now
|
||||
const isOAuthError =
|
||||
err.message?.includes('OAuth') ||
|
||||
err.message?.includes('authentication') ||
|
||||
err.message?.includes('401');
|
||||
|
||||
const isOAuthFlowInitiated = err.message === 'OAuth flow initiated - return early';
|
||||
|
||||
if (isOAuthError || oauthRequired || isOAuthFlowInitiated) {
|
||||
logger.info(
|
||||
`[MCP Reinitialize] OAuth required for ${serverName} (isOAuthError: ${isOAuthError}, oauthRequired: ${oauthRequired}, isOAuthFlowInitiated: ${isOAuthFlowInitiated})`,
|
||||
);
|
||||
oauthRequired = true;
|
||||
// Don't return error - continue so frontend can handle OAuth
|
||||
} else {
|
||||
logger.error(
|
||||
`[MCP Reinitialize] Error initializing MCP server ${serverName} for user:`,
|
||||
err,
|
||||
);
|
||||
return res.status(500).json({ error: 'Failed to reinitialize MCP server for user' });
|
||||
}
|
||||
}
|
||||
|
||||
// Only fetch and cache tools if we successfully connected (no OAuth required)
|
||||
if (userConnection && !oauthRequired) {
|
||||
const userTools = (await getCachedTools({ userId: user.id })) || {};
|
||||
|
||||
// Remove any old tools from this server in the user's cache
|
||||
const mcpDelimiter = Constants.mcp_delimiter;
|
||||
for (const key of Object.keys(userTools)) {
|
||||
if (key.endsWith(`${mcpDelimiter}${serverName}`)) {
|
||||
delete userTools[key];
|
||||
}
|
||||
}
|
||||
|
||||
// Add the new tools from this server
|
||||
const tools = await userConnection.fetchTools();
|
||||
for (const tool of tools) {
|
||||
const name = `${tool.name}${Constants.mcp_delimiter}${serverName}`;
|
||||
userTools[name] = {
|
||||
type: 'function',
|
||||
['function']: {
|
||||
name,
|
||||
description: tool.description,
|
||||
parameters: tool.inputSchema,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Save the updated user tool cache
|
||||
await setCachedTools(userTools, { userId: user.id });
|
||||
}
|
||||
|
||||
logger.debug(
|
||||
`[MCP Reinitialize] Sending response for ${serverName} - oauthRequired: ${oauthRequired}, oauthUrl: ${oauthUrl ? 'present' : 'null'}`,
|
||||
);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
message: oauthRequired
|
||||
? `MCP server '${serverName}' ready for OAuth authentication`
|
||||
: `MCP server '${serverName}' reinitialized successfully`,
|
||||
serverName,
|
||||
oauthRequired,
|
||||
oauthUrl,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('[MCP Reinitialize] Unexpected error', error);
|
||||
res.status(500).json({ error: 'Internal server error' });
|
||||
}
|
||||
});
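The reinitialize route above answers with `oauthRequired` and, when set, an `oauthUrl` the user must visit before the server can connect. A rough sketch of how a frontend might branch on that response (the `/api/mcp` mount path and cookie auth are assumptions):

// Hypothetical caller for illustration only.
async function reinitializeMcpServer(serverName) {
  const res = await fetch(`/api/mcp/${encodeURIComponent(serverName)}/reinitialize`, {
    method: 'POST',
    credentials: 'include',
  });
  const data = await res.json();

  if (data.oauthRequired && data.oauthUrl) {
    // Let the user complete OAuth in a new tab, then poll status or retry.
    window.open(data.oauthUrl, '_blank', 'noopener');
    return { pending: true };
  }
  return { pending: false, message: data.message };
}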
|
||||
|
||||
/**
|
||||
* Get connection status for all MCP servers
|
||||
* This endpoint returns the actual connection status from MCPManager
|
||||
* This endpoint returns the actual connection status from MCPManager without disconnecting idle connections
|
||||
*/
|
||||
router.get('/connection/status', requireJwtAuth, async (req, res) => {
|
||||
try {
|
||||
@@ -217,64 +478,85 @@ router.get('/connection/status', requireJwtAuth, async (req, res) => {
|
||||
return res.status(401).json({ error: 'User not authenticated' });
|
||||
}
|
||||
|
||||
const mcpManager = getMCPManager();
|
||||
const mcpManager = getMCPManager(user.id);
|
||||
const connectionStatus = {};
|
||||
|
||||
// Get all MCP server names from custom config
|
||||
const config = await loadCustomConfig();
|
||||
const printConfig = false;
|
||||
const config = await loadCustomConfig(printConfig);
|
||||
const mcpConfig = config?.mcpServers;
|
||||
|
||||
if (mcpConfig) {
|
||||
for (const [serverName, config] of Object.entries(mcpConfig)) {
|
||||
try {
|
||||
// Check if this is an app-level connection (exists in mcpManager.connections)
|
||||
const appConnection = mcpManager.getConnection(serverName);
|
||||
const hasAppConnection = !!appConnection;
|
||||
const appConnections = mcpManager.getAllConnections() || new Map();
|
||||
const userConnections = mcpManager.getUserConnections(user.id) || new Map();
|
||||
const oauthServers = mcpManager.getOAuthServers() || new Set();
|
||||
|
||||
// Check if this is a user-level connection (exists in mcpManager.userConnections)
|
||||
const userConnection = mcpManager.getUserConnectionIfExists(user.id, serverName);
|
||||
const hasUserConnection = !!userConnection;
|
||||
|
||||
// Determine if connected based on actual connection state
|
||||
let connected = false;
|
||||
if (hasAppConnection) {
|
||||
connected = await appConnection.isConnected();
|
||||
} else if (hasUserConnection) {
|
||||
connected = await userConnection.isConnected();
|
||||
}
|
||||
|
||||
// Determine if this server requires user authentication
|
||||
const hasAuthConfig =
|
||||
config.customUserVars && Object.keys(config.customUserVars).length > 0;
|
||||
const requiresOAuth = req.app.locals.mcpOAuthRequirements?.[serverName] || false;
|
||||
|
||||
connectionStatus[serverName] = {
|
||||
connected,
|
||||
hasAuthConfig,
|
||||
hasConnection: hasAppConnection || hasUserConnection,
|
||||
isAppLevel: hasAppConnection,
|
||||
isUserLevel: hasUserConnection,
|
||||
requiresOAuth,
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
`[MCP Connection Status] Error checking connection for ${serverName}:`,
|
||||
error,
|
||||
);
|
||||
connectionStatus[serverName] = {
|
||||
connected: false,
|
||||
hasAuthConfig: config.customUserVars && Object.keys(config.customUserVars).length > 0,
|
||||
hasConnection: false,
|
||||
isAppLevel: false,
|
||||
isUserLevel: false,
|
||||
requiresOAuth: req.app.locals.mcpOAuthRequirements?.[serverName] || false,
|
||||
error: error.message,
|
||||
};
|
||||
}
|
||||
}
|
||||
if (!mcpConfig) {
|
||||
return res.status(404).json({ error: 'MCP config not found' });
|
||||
}
|
||||
|
||||
logger.info(`[MCP Connection Status] Returning status for user ${user.id}:`, connectionStatus);
|
||||
// Get flow manager to check for active/timed-out OAuth flows
|
||||
const flowsCache = getLogStores(CacheKeys.FLOWS);
|
||||
const flowManager = getFlowStateManager(flowsCache);
|
||||
|
||||
for (const [serverName] of Object.entries(mcpConfig)) {
|
||||
const getConnectionState = (serverName) =>
|
||||
appConnections.get(serverName)?.connectionState ??
|
||||
userConnections.get(serverName)?.connectionState ??
|
||||
'disconnected';
|
||||
|
||||
const baseConnectionState = getConnectionState(serverName);
|
||||
|
||||
let hasActiveOAuthFlow = false;
|
||||
let hasFailedOAuthFlow = false;
|
||||
|
||||
if (baseConnectionState === 'disconnected' && oauthServers.has(serverName)) {
|
||||
try {
|
||||
// Check for user-specific OAuth flows
|
||||
const flowId = MCPOAuthHandler.generateFlowId(user.id, serverName);
|
||||
const flowState = await flowManager.getFlowState(flowId, 'mcp_oauth');
|
||||
if (flowState) {
|
||||
// Check if flow failed or timed out
|
||||
const flowAge = Date.now() - flowState.createdAt;
|
||||
const flowTTL = flowState.ttl || 180000; // Default 3 minutes
|
||||
|
||||
if (flowState.status === 'FAILED' || flowAge > flowTTL) {
|
||||
hasFailedOAuthFlow = true;
|
||||
logger.debug(`[MCP Connection Status] Found failed OAuth flow for ${serverName}`, {
|
||||
flowId,
|
||||
status: flowState.status,
|
||||
flowAge,
|
||||
flowTTL,
|
||||
timedOut: flowAge > flowTTL,
|
||||
});
|
||||
} else if (flowState.status === 'PENDING') {
|
||||
hasActiveOAuthFlow = true;
|
||||
logger.debug(`[MCP Connection Status] Found active OAuth flow for ${serverName}`, {
|
||||
flowId,
|
||||
flowAge,
|
||||
flowTTL,
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
`[MCP Connection Status] Error checking OAuth flows for ${serverName}:`,
|
||||
error,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Determine the final connection state
|
||||
let finalConnectionState = baseConnectionState;
|
||||
if (hasFailedOAuthFlow) {
|
||||
finalConnectionState = 'error'; // Report as error if OAuth failed
|
||||
} else if (hasActiveOAuthFlow && baseConnectionState === 'disconnected') {
|
||||
finalConnectionState = 'connecting'; // Still waiting for OAuth
|
||||
}
|
||||
|
||||
connectionStatus[serverName] = {
|
||||
requiresOAuth: oauthServers.has(serverName),
|
||||
connectionState: finalConnectionState,
|
||||
};
|
||||
}
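The loop above reduces each configured server to a `requiresOAuth` flag plus a `connectionState` that folds in OAuth flow state: 'error' when a flow failed or timed out, 'connecting' while one is still pending. For illustration, the resulting map might look like this (server names and states are made up):

const exampleConnectionStatus = {
  github: { requiresOAuth: true, connectionState: 'connected' },
  jira: { requiresOAuth: true, connectionState: 'connecting' }, // pending OAuth flow
  filesystem: { requiresOAuth: false, connectionState: 'disconnected' },
};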
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
@@ -299,7 +581,8 @@ router.get('/:serverName/auth-values', requireJwtAuth, async (req, res) => {
|
||||
return res.status(401).json({ error: 'User not authenticated' });
|
||||
}
|
||||
|
||||
const config = await loadCustomConfig();
|
||||
const printConfig = false;
|
||||
const config = await loadCustomConfig(printConfig);
|
||||
if (!config || !config.mcpServers || !config.mcpServers[serverName]) {
|
||||
return res.status(404).json({
|
||||
error: `MCP server '${serverName}' not found in configuration`,
|
||||
@@ -314,7 +597,7 @@ router.get('/:serverName/auth-values', requireJwtAuth, async (req, res) => {
|
||||
if (serverConfig.customUserVars && typeof serverConfig.customUserVars === 'object') {
|
||||
for (const varName of Object.keys(serverConfig.customUserVars)) {
|
||||
try {
|
||||
const value = await getUserPluginAuthValueByPlugin(user.id, varName, pluginKey, false);
|
||||
const value = await getUserPluginAuthValue(user.id, varName, false, pluginKey);
|
||||
// Only store boolean flag indicating if value exists
|
||||
authValueFlags[varName] = !!(value && value.length > 0);
|
||||
} catch (err) {
|
||||
@@ -342,337 +625,4 @@ router.get('/:serverName/auth-values', requireJwtAuth, async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Check if a specific MCP server requires OAuth
|
||||
* This endpoint checks if a specific MCP server requires OAuth authentication
|
||||
*/
|
||||
router.get('/:serverName/oauth/required', requireJwtAuth, async (req, res) => {
|
||||
try {
|
||||
const { serverName } = req.params;
|
||||
const user = req.user;
|
||||
|
||||
if (!user?.id) {
|
||||
return res.status(401).json({ error: 'User not authenticated' });
|
||||
}
|
||||
|
||||
const mcpManager = getMCPManager();
|
||||
const requiresOAuth = await mcpManager.isOAuthRequired(serverName);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
serverName,
|
||||
requiresOAuth,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
`[MCP OAuth Required] Failed to check OAuth requirement for ${req.params.serverName}`,
|
||||
error,
|
||||
);
|
||||
res.status(500).json({ error: 'Failed to check OAuth requirement' });
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Complete MCP server reinitialization after OAuth
|
||||
* This endpoint completes the reinitialization process after OAuth authentication
|
||||
*/
|
||||
router.post('/:serverName/reinitialize/complete', requireJwtAuth, async (req, res) => {
|
||||
let responseSent = false;
|
||||
|
||||
try {
|
||||
const { serverName } = req.params;
|
||||
const user = req.user;
|
||||
|
||||
if (!user?.id) {
|
||||
responseSent = true;
|
||||
return res.status(401).json({ error: 'User not authenticated' });
|
||||
}
|
||||
|
||||
logger.info(`[MCP Complete Reinitialize] Starting completion for ${serverName}`);
|
||||
|
||||
const mcpManager = getMCPManager();
|
||||
|
||||
// Wait for connection to be established via event-driven approach
|
||||
const userConnection = await new Promise((resolve, reject) => {
|
||||
// Set a reasonable timeout (10 seconds)
|
||||
const timeout = setTimeout(() => {
|
||||
mcpManager.removeListener('connectionEstablished', connectionHandler);
|
||||
reject(new Error('Timeout waiting for connection establishment'));
|
||||
}, 10000);
|
||||
|
||||
const connectionHandler = ({
|
||||
userId: eventUserId,
|
||||
serverName: eventServerName,
|
||||
connection,
|
||||
}) => {
|
||||
if (eventUserId === user.id && eventServerName === serverName) {
|
||||
clearTimeout(timeout);
|
||||
mcpManager.removeListener('connectionEstablished', connectionHandler);
|
||||
resolve(connection);
|
||||
}
|
||||
};
|
||||
|
||||
// Check if connection already exists
|
||||
const existingConnection = mcpManager.getUserConnectionIfExists(user.id, serverName);
|
||||
if (existingConnection) {
|
||||
clearTimeout(timeout);
|
||||
resolve(existingConnection);
|
||||
return;
|
||||
}
|
||||
|
||||
// Listen for the connection establishment event
|
||||
mcpManager.on('connectionEstablished', connectionHandler);
|
||||
});
|
||||
|
||||
if (!userConnection) {
|
||||
responseSent = true;
|
||||
return res.status(404).json({ error: 'User connection not found' });
|
||||
}
|
||||
|
||||
const userTools = (await getCachedTools({ userId: user.id })) || {};
|
||||
|
||||
// Remove any old tools from this server in the user's cache
|
||||
const mcpDelimiter = Constants.mcp_delimiter;
|
||||
for (const key of Object.keys(userTools)) {
|
||||
if (key.endsWith(`${mcpDelimiter}${serverName}`)) {
|
||||
delete userTools[key];
|
||||
}
|
||||
}
|
||||
|
||||
// Add the new tools from this server
|
||||
const tools = await userConnection.fetchTools();
|
||||
for (const tool of tools) {
|
||||
const name = `${tool.name}${Constants.mcp_delimiter}${serverName}`;
|
||||
userTools[name] = {
|
||||
type: 'function',
|
||||
['function']: {
|
||||
name,
|
||||
description: tool.description,
|
||||
parameters: tool.inputSchema,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Save the updated user tool cache
|
||||
await setCachedTools(userTools, { userId: user.id });
|
||||
|
||||
responseSent = true;
|
||||
res.json({
|
||||
success: true,
|
||||
message: `MCP server '${serverName}' reinitialized successfully`,
|
||||
serverName,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
`[MCP Complete Reinitialize] Error completing reinitialization for ${req.params.serverName}:`,
|
||||
error,
|
||||
);
|
||||
|
||||
if (!responseSent) {
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
message: 'Failed to complete MCP server reinitialization',
|
||||
serverName: req.params.serverName,
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Reinitialize MCP server
|
||||
* This endpoint allows reinitializing a specific MCP server
|
||||
*/
|
||||
router.post('/:serverName/reinitialize', requireJwtAuth, async (req, res) => {
|
||||
let responseSent = false;
|
||||
|
||||
try {
|
||||
const { serverName } = req.params;
|
||||
const user = req.user;
|
||||
|
||||
if (!user?.id) {
|
||||
responseSent = true;
|
||||
return res.status(401).json({ error: 'User not authenticated' });
|
||||
}
|
||||
|
||||
logger.info(`[MCP Reinitialize] Reinitializing server: ${serverName}`);
|
||||
|
||||
const config = await loadCustomConfig();
|
||||
if (!config || !config.mcpServers || !config.mcpServers[serverName]) {
|
||||
responseSent = true;
|
||||
return res.status(404).json({
|
||||
error: `MCP server '${serverName}' not found in configuration`,
|
||||
});
|
||||
}
|
||||
|
||||
const flowsCache = getLogStores(CacheKeys.FLOWS);
|
||||
const flowManager = getFlowStateManager(flowsCache);
|
||||
const mcpManager = getMCPManager();
|
||||
|
||||
// Clean up any stale OAuth flows for this server
|
||||
try {
|
||||
const flowId = MCPOAuthHandler.generateFlowId(user.id, serverName);
|
||||
const existingFlow = await flowManager.getFlowState(flowId, 'mcp_oauth');
|
||||
if (existingFlow && existingFlow.status === 'PENDING') {
|
||||
logger.info(`[MCP Reinitialize] Cleaning up stale OAuth flow for ${serverName}`);
|
||||
await flowManager.failFlow(flowId, 'mcp_oauth', new Error('OAuth flow interrupted'));
|
||||
}
|
||||
} catch (error) {
|
||||
logger.warn(
|
||||
`[MCP Reinitialize] Error cleaning up stale OAuth flow for ${serverName}:`,
|
||||
error,
|
||||
);
|
||||
}
|
||||
|
||||
await mcpManager.disconnectServer(serverName);
|
||||
logger.info(`[MCP Reinitialize] Disconnected existing server: ${serverName}`);
|
||||
|
||||
const serverConfig = config.mcpServers[serverName];
|
||||
mcpManager.mcpConfigs[serverName] = serverConfig;
|
||||
let customUserVars = {};
|
||||
if (serverConfig.customUserVars && typeof serverConfig.customUserVars === 'object') {
|
||||
for (const varName of Object.keys(serverConfig.customUserVars)) {
|
||||
try {
|
||||
const pluginKey = `${Constants.mcp_prefix}${serverName}`;
|
||||
const value = await getUserPluginAuthValueByPlugin(user.id, varName, pluginKey, false);
|
||||
if (value) {
|
||||
customUserVars[varName] = value;
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(`[MCP Reinitialize] Error fetching ${varName} for user ${user.id}:`, err);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let userConnection = null;
|
||||
let oauthRequired = false;
|
||||
|
||||
try {
|
||||
userConnection = await mcpManager.getUserConnection({
|
||||
user,
|
||||
serverName,
|
||||
flowManager,
|
||||
customUserVars,
|
||||
tokenMethods: {
|
||||
findToken,
|
||||
updateToken,
|
||||
createToken,
|
||||
deleteTokens,
|
||||
},
|
||||
oauthStart: (authURL) => {
|
||||
// This will be called if OAuth is required
|
||||
oauthRequired = true;
|
||||
responseSent = true;
|
||||
logger.info(`[MCP Reinitialize] OAuth required for ${serverName}, auth URL: ${authURL}`);
|
||||
|
||||
// Get the flow ID for polling
|
||||
const flowId = MCPOAuthHandler.generateFlowId(user.id, serverName);
|
||||
|
||||
// Return the OAuth response immediately - client will poll for completion
|
||||
res.json({
|
||||
success: false,
|
||||
oauthRequired: true,
|
||||
authURL,
|
||||
flowId,
|
||||
message: `OAuth authentication required for MCP server '${serverName}'`,
|
||||
serverName,
|
||||
});
|
||||
},
|
||||
oauthEnd: () => {
|
||||
// This will be called when OAuth flow completes
|
||||
logger.info(`[MCP Reinitialize] OAuth flow completed for ${serverName}`);
|
||||
},
|
||||
});
|
||||
|
||||
// If response was already sent for OAuth, don't continue
|
||||
if (responseSent) {
|
||||
return;
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(`[MCP Reinitialize] Error initializing MCP server ${serverName} for user:`, err);
|
||||
|
||||
// Check if this is an OAuth error
|
||||
if (err.message && err.message.includes('OAuth required')) {
|
||||
// Try to get the OAuth URL from the flow manager
|
||||
try {
|
||||
const flowId = MCPOAuthHandler.generateFlowId(user.id, serverName);
|
||||
const existingFlow = await flowManager.getFlowState(flowId, 'mcp_oauth');
|
||||
|
||||
if (existingFlow && existingFlow.metadata) {
|
||||
const { serverUrl, oauth: oauthConfig } = existingFlow.metadata;
|
||||
if (serverUrl && oauthConfig) {
|
||||
const { authorizationUrl: authUrl } = await MCPOAuthHandler.initiateOAuthFlow(
|
||||
serverName,
|
||||
serverUrl,
|
||||
user.id,
|
||||
oauthConfig,
|
||||
);
|
||||
|
||||
return res.json({
|
||||
success: false,
|
||||
oauthRequired: true,
|
||||
authURL: authUrl,
|
||||
flowId,
|
||||
message: `OAuth authentication required for MCP server '${serverName}'`,
|
||||
serverName,
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (oauthErr) {
|
||||
logger.error(`[MCP Reinitialize] Error getting OAuth URL for ${serverName}:`, oauthErr);
|
||||
}
|
||||
|
||||
responseSent = true;
|
||||
return res.status(401).json({
|
||||
success: false,
|
||||
oauthRequired: true,
|
||||
message: `OAuth authentication required for MCP server '${serverName}'`,
|
||||
serverName,
|
||||
});
|
||||
}
|
||||
|
||||
responseSent = true;
|
||||
return res.status(500).json({ error: 'Failed to reinitialize MCP server for user' });
|
||||
}
|
||||
|
||||
const userTools = (await getCachedTools({ userId: user.id })) || {};
|
||||
|
||||
// Remove any old tools from this server in the user's cache
|
||||
const mcpDelimiter = Constants.mcp_delimiter;
|
||||
for (const key of Object.keys(userTools)) {
|
||||
if (key.endsWith(`${mcpDelimiter}${serverName}`)) {
|
||||
delete userTools[key];
|
||||
}
|
||||
}
|
||||
|
||||
// Add the new tools from this server
|
||||
const tools = await userConnection.fetchTools();
|
||||
for (const tool of tools) {
|
||||
const name = `${tool.name}${Constants.mcp_delimiter}${serverName}`;
|
||||
userTools[name] = {
|
||||
type: 'function',
|
||||
['function']: {
|
||||
name,
|
||||
description: tool.description,
|
||||
parameters: tool.inputSchema,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Save the updated user tool cache
|
||||
await setCachedTools(userTools, { userId: user.id });
|
||||
|
||||
responseSent = true;
|
||||
res.json({
|
||||
success: true,
|
||||
message: `MCP server '${serverName}' reinitialized successfully`,
|
||||
serverName,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('[MCP Reinitialize] Unexpected error', error);
|
||||
if (!responseSent) {
|
||||
res.status(500).json({ error: 'Internal server error' });
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
const { agentsConfigSetup, loadWebSearchConfig } = require('@librechat/api');
|
||||
const {
|
||||
FileSources,
|
||||
loadOCRConfig,
|
||||
EModelEndpoint,
|
||||
loadMemoryConfig,
|
||||
getConfigDefaults,
|
||||
loadWebSearchConfig,
|
||||
} = require('librechat-data-provider');
|
||||
const { agentsConfigSetup } = require('@librechat/api');
|
||||
const {
|
||||
checkHealth,
|
||||
checkConfig,
|
||||
@@ -158,6 +157,10 @@ const AppService = async (app) => {
|
||||
}
|
||||
});
|
||||
|
||||
if (endpoints?.all) {
|
||||
endpointLocals.all = endpoints.all;
|
||||
}
|
||||
|
||||
app.locals = {
|
||||
...defaultLocals,
|
||||
fileConfig: config?.fileConfig,
|
||||
|
||||
@@ -543,6 +543,206 @@ describe('AppService', () => {
|
||||
expect(process.env.IMPORT_USER_MAX).toEqual('initialUserMax');
|
||||
expect(process.env.IMPORT_USER_WINDOW).toEqual('initialUserWindow');
|
||||
});
|
||||
|
||||
it('should correctly configure endpoint with titlePrompt, titleMethod, and titlePromptTemplate', async () => {
|
||||
require('./Config/loadCustomConfig').mockImplementationOnce(() =>
|
||||
Promise.resolve({
|
||||
endpoints: {
|
||||
[EModelEndpoint.openAI]: {
|
||||
titleConvo: true,
|
||||
titleModel: 'gpt-3.5-turbo',
|
||||
titleMethod: 'structured',
|
||||
titlePrompt: 'Custom title prompt for conversation',
|
||||
titlePromptTemplate: 'Summarize this conversation: {{conversation}}',
|
||||
},
|
||||
[EModelEndpoint.assistants]: {
|
||||
titleMethod: 'functions',
|
||||
titlePrompt: 'Generate a title for this assistant conversation',
|
||||
titlePromptTemplate: 'Assistant conversation template: {{messages}}',
|
||||
},
|
||||
[EModelEndpoint.azureOpenAI]: {
|
||||
groups: azureGroups,
|
||||
titleConvo: true,
|
||||
titleMethod: 'completion',
|
||||
titleModel: 'gpt-4',
|
||||
titlePrompt: 'Azure title prompt',
|
||||
titlePromptTemplate: 'Azure conversation: {{context}}',
|
||||
},
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
await AppService(app);
|
||||
|
||||
// Check OpenAI endpoint configuration
|
||||
expect(app.locals).toHaveProperty(EModelEndpoint.openAI);
|
||||
expect(app.locals[EModelEndpoint.openAI]).toEqual(
|
||||
expect.objectContaining({
|
||||
titleConvo: true,
|
||||
titleModel: 'gpt-3.5-turbo',
|
||||
titleMethod: 'structured',
|
||||
titlePrompt: 'Custom title prompt for conversation',
|
||||
titlePromptTemplate: 'Summarize this conversation: {{conversation}}',
|
||||
}),
|
||||
);
|
||||
|
||||
// Check Assistants endpoint configuration
|
||||
expect(app.locals).toHaveProperty(EModelEndpoint.assistants);
|
||||
expect(app.locals[EModelEndpoint.assistants]).toMatchObject({
|
||||
titleMethod: 'functions',
|
||||
titlePrompt: 'Generate a title for this assistant conversation',
|
||||
titlePromptTemplate: 'Assistant conversation template: {{messages}}',
|
||||
});
|
||||
|
||||
// Check Azure OpenAI endpoint configuration
|
||||
expect(app.locals).toHaveProperty(EModelEndpoint.azureOpenAI);
|
||||
expect(app.locals[EModelEndpoint.azureOpenAI]).toEqual(
|
||||
expect.objectContaining({
|
||||
titleConvo: true,
|
||||
titleMethod: 'completion',
|
||||
titleModel: 'gpt-4',
|
||||
titlePrompt: 'Azure title prompt',
|
||||
titlePromptTemplate: 'Azure conversation: {{context}}',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should configure Agent endpoint with title generation settings', async () => {
|
||||
require('./Config/loadCustomConfig').mockImplementationOnce(() =>
|
||||
Promise.resolve({
|
||||
endpoints: {
|
||||
[EModelEndpoint.agents]: {
|
||||
disableBuilder: false,
|
||||
titleConvo: true,
|
||||
titleModel: 'gpt-4',
|
||||
titleMethod: 'structured',
|
||||
titlePrompt: 'Generate a descriptive title for this agent conversation',
|
||||
titlePromptTemplate: 'Agent conversation summary: {{content}}',
|
||||
recursionLimit: 15,
|
||||
capabilities: [AgentCapabilities.tools, AgentCapabilities.actions],
|
||||
},
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
await AppService(app);
|
||||
|
||||
expect(app.locals).toHaveProperty(EModelEndpoint.agents);
|
||||
expect(app.locals[EModelEndpoint.agents]).toMatchObject({
|
||||
disableBuilder: false,
|
||||
titleConvo: true,
|
||||
titleModel: 'gpt-4',
|
||||
titleMethod: 'structured',
|
||||
titlePrompt: 'Generate a descriptive title for this agent conversation',
|
||||
titlePromptTemplate: 'Agent conversation summary: {{content}}',
|
||||
recursionLimit: 15,
|
||||
capabilities: expect.arrayContaining([AgentCapabilities.tools, AgentCapabilities.actions]),
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle missing title configuration options with defaults', async () => {
|
||||
require('./Config/loadCustomConfig').mockImplementationOnce(() =>
|
||||
Promise.resolve({
|
||||
endpoints: {
|
||||
[EModelEndpoint.openAI]: {
|
||||
titleConvo: true,
|
||||
// titlePrompt and titlePromptTemplate are not provided
|
||||
},
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
await AppService(app);
|
||||
|
||||
expect(app.locals).toHaveProperty(EModelEndpoint.openAI);
|
||||
expect(app.locals[EModelEndpoint.openAI]).toMatchObject({
|
||||
titleConvo: true,
|
||||
});
|
||||
// Check that the optional fields are undefined when not provided
|
||||
expect(app.locals[EModelEndpoint.openAI].titlePrompt).toBeUndefined();
|
||||
expect(app.locals[EModelEndpoint.openAI].titlePromptTemplate).toBeUndefined();
|
||||
expect(app.locals[EModelEndpoint.openAI].titleMethod).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should correctly configure titleEndpoint when specified', async () => {
|
||||
require('./Config/loadCustomConfig').mockImplementationOnce(() =>
|
||||
Promise.resolve({
|
||||
endpoints: {
|
||||
[EModelEndpoint.openAI]: {
|
||||
titleConvo: true,
|
||||
titleModel: 'gpt-3.5-turbo',
|
||||
titleEndpoint: EModelEndpoint.anthropic,
|
||||
titlePrompt: 'Generate a concise title',
|
||||
},
|
||||
[EModelEndpoint.agents]: {
|
||||
titleEndpoint: 'custom-provider',
|
||||
titleMethod: 'structured',
|
||||
},
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
await AppService(app);
|
||||
|
||||
// Check OpenAI endpoint has titleEndpoint
|
||||
expect(app.locals).toHaveProperty(EModelEndpoint.openAI);
|
||||
expect(app.locals[EModelEndpoint.openAI]).toMatchObject({
|
||||
titleConvo: true,
|
||||
titleModel: 'gpt-3.5-turbo',
|
||||
titleEndpoint: EModelEndpoint.anthropic,
|
||||
titlePrompt: 'Generate a concise title',
|
||||
});
|
||||
|
||||
// Check Agents endpoint has titleEndpoint
|
||||
expect(app.locals).toHaveProperty(EModelEndpoint.agents);
|
||||
expect(app.locals[EModelEndpoint.agents]).toMatchObject({
|
||||
titleEndpoint: 'custom-provider',
|
||||
titleMethod: 'structured',
|
||||
});
|
||||
});
|
||||
|
||||
it('should correctly configure all endpoint when specified', async () => {
|
||||
require('./Config/loadCustomConfig').mockImplementationOnce(() =>
|
||||
Promise.resolve({
|
||||
endpoints: {
|
||||
all: {
|
||||
titleConvo: true,
|
||||
titleModel: 'gpt-4o-mini',
|
||||
titleMethod: 'structured',
|
||||
titlePrompt: 'Default title prompt for all endpoints',
|
||||
titlePromptTemplate: 'Default template: {{conversation}}',
|
||||
titleEndpoint: EModelEndpoint.anthropic,
|
||||
streamRate: 50,
|
||||
},
|
||||
[EModelEndpoint.openAI]: {
|
||||
titleConvo: true,
|
||||
titleModel: 'gpt-3.5-turbo',
|
||||
},
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
await AppService(app);
|
||||
|
||||
// Check that 'all' endpoint config is loaded
|
||||
expect(app.locals).toHaveProperty('all');
|
||||
expect(app.locals.all).toMatchObject({
|
||||
titleConvo: true,
|
||||
titleModel: 'gpt-4o-mini',
|
||||
titleMethod: 'structured',
|
||||
titlePrompt: 'Default title prompt for all endpoints',
|
||||
titlePromptTemplate: 'Default template: {{conversation}}',
|
||||
titleEndpoint: EModelEndpoint.anthropic,
|
||||
streamRate: 50,
|
||||
});
|
||||
|
||||
// Check that OpenAI endpoint has its own config
|
||||
expect(app.locals).toHaveProperty(EModelEndpoint.openAI);
|
||||
expect(app.locals[EModelEndpoint.openAI]).toMatchObject({
|
||||
titleConvo: true,
|
||||
titleModel: 'gpt-3.5-turbo',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('AppService updating app.locals and issuing warnings', () => {
|
||||
|
||||
@@ -3,7 +3,6 @@ const { isEnabled, getUserMCPAuthMap } = require('@librechat/api');
|
||||
const { CacheKeys, EModelEndpoint } = require('librechat-data-provider');
|
||||
const { normalizeEndpointName } = require('~/server/utils');
|
||||
const loadCustomConfig = require('./loadCustomConfig');
|
||||
const { getCachedTools } = require('./getCachedTools');
|
||||
const getLogStores = require('~/cache/getLogStores');
|
||||
|
||||
/**
|
||||
@@ -12,8 +11,8 @@ const getLogStores = require('~/cache/getLogStores');
|
||||
* @returns {Promise<TCustomConfig | null>}
|
||||
* */
|
||||
async function getCustomConfig() {
|
||||
const cache = getLogStores(CacheKeys.CONFIG_STORE);
|
||||
return (await cache.get(CacheKeys.CUSTOM_CONFIG)) || (await loadCustomConfig());
|
||||
const cache = getLogStores(CacheKeys.STATIC_CONFIG);
|
||||
return (await cache.get(CacheKeys.LIBRECHAT_YAML_CONFIG)) || (await loadCustomConfig());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -66,13 +65,9 @@ async function getMCPAuthMap({ userId, tools, findPluginAuthsByKeys }) {
|
||||
if (!tools || tools.length === 0) {
|
||||
return;
|
||||
}
|
||||
const appTools = await getCachedTools({
|
||||
userId,
|
||||
});
|
||||
return await getUserMCPAuthMap({
|
||||
tools,
|
||||
userId,
|
||||
appTools,
|
||||
findPluginAuthsByKeys,
|
||||
});
|
||||
} catch (err) {
|
||||
|
||||
@@ -25,7 +25,7 @@ let i = 0;
|
||||
* @function loadCustomConfig
|
||||
* @returns {Promise<TCustomConfig | null>} A promise that resolves to null or the custom config object.
|
||||
* */
|
||||
async function loadCustomConfig() {
|
||||
async function loadCustomConfig(printConfig = true) {
|
||||
// Use CONFIG_PATH if set, otherwise fallback to defaultConfigPath
|
||||
const configPath = process.env.CONFIG_PATH || defaultConfigPath;
|
||||
|
||||
@@ -108,7 +108,11 @@ https://www.librechat.ai/docs/configuration/stt_tts`);
|
||||
|
||||
return null;
|
||||
} else {
|
||||
logger.debug('Custom config:', customConfig);
|
||||
if (printConfig) {
|
||||
logger.info('Custom config file loaded:');
|
||||
logger.info(JSON.stringify(customConfig, null, 2));
|
||||
logger.debug('Custom config:', customConfig);
|
||||
}
|
||||
}
|
||||
|
||||
(customConfig.endpoints?.custom ?? [])
|
||||
@@ -116,8 +120,8 @@ https://www.librechat.ai/docs/configuration/stt_tts`);
|
||||
.forEach((endpoint) => parseCustomParams(endpoint.name, endpoint.customParams));
|
||||
|
||||
if (customConfig.cache) {
|
||||
const cache = getLogStores(CacheKeys.CONFIG_STORE);
|
||||
await cache.set(CacheKeys.CUSTOM_CONFIG, customConfig);
|
||||
const cache = getLogStores(CacheKeys.STATIC_CONFIG);
|
||||
await cache.set(CacheKeys.LIBRECHAT_YAML_CONFIG, customConfig);
|
||||
}
|
||||
|
||||
if (result.data.modelSpecs) {
|
||||
|
||||
@@ -104,7 +104,7 @@ const initializeAgent = async ({
|
||||
|
||||
agent.endpoint = provider;
|
||||
const { getOptions, overrideProvider } = await getProviderConfig(provider);
|
||||
if (overrideProvider) {
|
||||
if (overrideProvider !== agent.provider) {
|
||||
agent.provider = overrideProvider;
|
||||
}
|
||||
|
||||
@@ -131,7 +131,7 @@ const initializeAgent = async ({
|
||||
);
|
||||
const agentMaxContextTokens = optionalChainWithEmptyCheck(
|
||||
maxContextTokens,
|
||||
getModelMaxTokens(tokensModel, providerEndpointMap[provider]),
|
||||
getModelMaxTokens(tokensModel, providerEndpointMap[provider], options.endpointTokenConfig),
|
||||
4096,
|
||||
);
|
||||
|
||||
@@ -186,11 +186,12 @@ const initializeAgent = async ({
|
||||
|
||||
return {
|
||||
...agent,
|
||||
tools,
|
||||
attachments,
|
||||
resendFiles,
|
||||
toolContextMap,
|
||||
tools,
|
||||
maxContextTokens: (agentMaxContextTokens - maxTokens) * 0.9,
|
||||
useLegacyContent: !!options.useLegacyContent,
|
||||
maxContextTokens: Math.round((agentMaxContextTokens - maxTokens) * 0.9),
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
const OpenAI = require('openai');
|
||||
const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||
const { ProxyAgent } = require('undici');
|
||||
const { ErrorTypes, EModelEndpoint } = require('librechat-data-provider');
|
||||
const {
|
||||
getUserKeyValues,
|
||||
@@ -59,7 +59,10 @@ const initializeClient = async ({ req, res, endpointOption, version, initAppClie
|
||||
}
|
||||
|
||||
if (PROXY) {
|
||||
opts.httpAgent = new HttpsProxyAgent(PROXY);
|
||||
const proxyAgent = new ProxyAgent(PROXY);
|
||||
opts.fetchOptions = {
|
||||
dispatcher: proxyAgent,
|
||||
};
|
||||
}
|
||||
|
||||
if (OPENAI_ORGANIZATION) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// const OpenAI = require('openai');
|
||||
const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||
const { ProxyAgent } = require('undici');
|
||||
const { ErrorTypes } = require('librechat-data-provider');
|
||||
const { getUserKey, getUserKeyExpiry, getUserKeyValues } = require('~/server/services/UserService');
|
||||
const initializeClient = require('./initalize');
|
||||
@@ -107,6 +107,7 @@ describe('initializeClient', () => {
|
||||
const res = {};
|
||||
|
||||
const { openai } = await initializeClient({ req, res });
|
||||
expect(openai.httpAgent).toBeInstanceOf(HttpsProxyAgent);
|
||||
expect(openai.fetchOptions).toBeDefined();
|
||||
expect(openai.fetchOptions.dispatcher).toBeInstanceOf(ProxyAgent);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
const OpenAI = require('openai');
|
||||
const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||
const { ProxyAgent } = require('undici');
|
||||
const { constructAzureURL, isUserProvided, resolveHeaders } = require('@librechat/api');
|
||||
const { ErrorTypes, EModelEndpoint, mapModelToAzureConfig } = require('librechat-data-provider');
|
||||
const {
|
||||
@@ -158,7 +158,10 @@ const initializeClient = async ({ req, res, version, endpointOption, initAppClie
|
||||
}
|
||||
|
||||
if (PROXY) {
|
||||
opts.httpAgent = new HttpsProxyAgent(PROXY);
|
||||
const proxyAgent = new ProxyAgent(PROXY);
|
||||
opts.fetchOptions = {
|
||||
dispatcher: proxyAgent,
|
||||
};
|
||||
}
|
||||
|
||||
if (OPENAI_ORGANIZATION) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// const OpenAI = require('openai');
|
||||
const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||
const { ProxyAgent } = require('undici');
|
||||
const { ErrorTypes } = require('librechat-data-provider');
|
||||
const { getUserKey, getUserKeyExpiry, getUserKeyValues } = require('~/server/services/UserService');
|
||||
const initializeClient = require('./initialize');
|
||||
@@ -107,6 +107,7 @@ describe('initializeClient', () => {
|
||||
const res = {};
|
||||
|
||||
const { openai } = await initializeClient({ req, res });
|
||||
expect(openai.httpAgent).toBeInstanceOf(HttpsProxyAgent);
|
||||
expect(openai.fetchOptions).toBeDefined();
|
||||
expect(openai.fetchOptions.dispatcher).toBeInstanceOf(ProxyAgent);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -139,7 +139,11 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
|
||||
);
|
||||
clientOptions.modelOptions.user = req.user.id;
|
||||
const options = getOpenAIConfig(apiKey, clientOptions, endpoint);
|
||||
if (!customOptions.streamRate) {
|
||||
if (options != null) {
|
||||
options.useLegacyContent = true;
|
||||
options.endpointTokenConfig = endpointTokenConfig;
|
||||
}
|
||||
if (!clientOptions.streamRate) {
|
||||
return options;
|
||||
}
|
||||
options.llmConfig.callbacks = [
|
||||
@@ -156,6 +160,7 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
|
||||
}
|
||||
|
||||
return {
|
||||
useLegacyContent: true,
|
||||
llmConfig: modelOptions,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -34,13 +34,13 @@ const providerConfigMap = {
|
||||
* @param {string} provider - The provider string
|
||||
* @returns {Promise<{
|
||||
* getOptions: Function,
|
||||
* overrideProvider?: string,
|
||||
* overrideProvider: string,
|
||||
* customEndpointConfig?: TEndpoint
|
||||
* }>}
|
||||
*/
|
||||
async function getProviderConfig(provider) {
|
||||
let getOptions = providerConfigMap[provider];
|
||||
let overrideProvider;
|
||||
let overrideProvider = provider;
|
||||
/** @type {TEndpoint | undefined} */
|
||||
let customEndpointConfig;
|
||||
|
||||
@@ -56,7 +56,7 @@ async function getProviderConfig(provider) {
|
||||
overrideProvider = Providers.OPENAI;
|
||||
}
|
||||
|
||||
if (isKnownCustomProvider(overrideProvider || provider) && !customEndpointConfig) {
|
||||
if (isKnownCustomProvider(overrideProvider) && !customEndpointConfig) {
|
||||
customEndpointConfig = await getCustomEndpointConfig(provider);
|
||||
if (!customEndpointConfig) {
|
||||
throw new Error(`Provider ${provider} not supported`);
|
||||
|
||||
@@ -65,19 +65,20 @@ const initializeClient = async ({
|
||||
const isAzureOpenAI = endpoint === EModelEndpoint.azureOpenAI;
|
||||
/** @type {false | TAzureConfig} */
|
||||
const azureConfig = isAzureOpenAI && req.app.locals[EModelEndpoint.azureOpenAI];
|
||||
|
||||
let serverless = false;
|
||||
if (isAzureOpenAI && azureConfig) {
|
||||
const { modelGroupMap, groupMap } = azureConfig;
|
||||
const {
|
||||
azureOptions,
|
||||
baseURL,
|
||||
headers = {},
|
||||
serverless,
|
||||
serverless: _serverless,
|
||||
} = mapModelToAzureConfig({
|
||||
modelName,
|
||||
modelGroupMap,
|
||||
groupMap,
|
||||
});
|
||||
serverless = _serverless;
|
||||
|
||||
clientOptions.reverseProxyUrl = baseURL ?? clientOptions.reverseProxyUrl;
|
||||
clientOptions.headers = resolveHeaders(
|
||||
@@ -143,6 +144,9 @@ const initializeClient = async ({
|
||||
clientOptions = Object.assign({ modelOptions }, clientOptions);
|
||||
clientOptions.modelOptions.user = req.user.id;
|
||||
const options = getOpenAIConfig(apiKey, clientOptions);
|
||||
if (options != null && serverless === true) {
|
||||
options.useLegacyContent = true;
|
||||
}
|
||||
const streamRate = clientOptions.streamRate;
|
||||
if (!streamRate) {
|
||||
return options;
|
||||
|
||||
@@ -235,6 +235,7 @@ async function createMCPTool({ req, res, toolKey, provider: _provider }) {
|
||||
responseFormat: AgentConstants.CONTENT_AND_ARTIFACT,
|
||||
});
|
||||
toolInstance.mcp = true;
|
||||
toolInstance.mcpRawServerName = serverName;
|
||||
return toolInstance;
|
||||
}
|
||||
|
||||
|
||||
@@ -8,6 +8,7 @@ const { findOnePluginAuth, updatePluginAuth, deletePluginAuth } = require('~/mod
|
||||
* @param {string} userId - The unique identifier of the user for whom the plugin authentication value is to be retrieved.
|
||||
* @param {string} authField - The specific authentication field (e.g., 'API_KEY', 'URL') whose value is to be retrieved and decrypted.
|
||||
* @param {boolean} throwError - Whether to throw an error if the authentication value does not exist. Defaults to `true`.
|
||||
* @param {string} [pluginKey] - Optional plugin key to make the lookup more specific to a particular plugin.
|
||||
* @returns {Promise<string|null>} A promise that resolves to the decrypted authentication value if found, or `null` if no such authentication value exists for the given user and field.
|
||||
*
|
||||
* The function throws an error if it encounters any issue during the retrieval or decryption process, or if the authentication value does not exist.
|
||||
@@ -20,14 +21,28 @@ const { findOnePluginAuth, updatePluginAuth, deletePluginAuth } = require('~/mod
|
||||
* console.error(err);
|
||||
* });
|
||||
*
|
||||
* @example
|
||||
* // To get the decrypted value of the 'API_KEY' field for a specific plugin:
|
||||
* getUserPluginAuthValue('12345', 'API_KEY', true, 'mcp-server-name').then(value => {
|
||||
* console.log(value);
|
||||
* }).catch(err => {
|
||||
* console.error(err);
|
||||
* });
|
||||
*
|
||||
* @throws {Error} Throws an error if there's an issue during the retrieval or decryption process, or if the authentication value does not exist.
|
||||
* @async
|
||||
*/
|
||||
const getUserPluginAuthValue = async (userId, authField, throwError = true) => {
|
||||
const getUserPluginAuthValue = async (userId, authField, throwError = true, pluginKey) => {
|
||||
try {
|
||||
const pluginAuth = await findOnePluginAuth({ userId, authField });
|
||||
const searchParams = { userId, authField };
|
||||
if (pluginKey) {
|
||||
searchParams.pluginKey = pluginKey;
|
||||
}
|
||||
|
||||
const pluginAuth = await findOnePluginAuth(searchParams);
|
||||
if (!pluginAuth) {
|
||||
throw new Error(`No plugin auth ${authField} found for user ${userId}`);
|
||||
const pluginInfo = pluginKey ? ` for plugin ${pluginKey}` : '';
|
||||
throw new Error(`No plugin auth ${authField} found for user ${userId}${pluginInfo}`);
|
||||
}
|
||||
|
||||
const decryptedValue = await decrypt(pluginAuth.value);
|
||||
@@ -41,38 +56,6 @@ const getUserPluginAuthValue = async (userId, authField, throwError = true) => {
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Asynchronously retrieves and decrypts the authentication value for a user's specific plugin, based on a specified authentication field and plugin key.
|
||||
*
|
||||
* @param {string} userId - The unique identifier of the user for whom the plugin authentication value is to be retrieved.
|
||||
* @param {string} authField - The specific authentication field (e.g., 'API_KEY', 'URL') whose value is to be retrieved and decrypted.
|
||||
* @param {string} pluginKey - The plugin key to filter by (e.g., 'mcp_github-mcp').
|
||||
* @param {boolean} throwError - Whether to throw an error if the authentication value does not exist. Defaults to `true`.
|
||||
* @returns {Promise<string|null>} A promise that resolves to the decrypted authentication value if found, or `null` if no such authentication value exists for the given user, field, and plugin.
|
||||
*
|
||||
* @throws {Error} Throws an error if there's an issue during the retrieval or decryption process, or if the authentication value does not exist.
|
||||
* @async
|
||||
*/
|
||||
const getUserPluginAuthValueByPlugin = async (userId, authField, pluginKey, throwError = true) => {
|
||||
try {
|
||||
const pluginAuth = await findOnePluginAuth({ userId, authField, pluginKey });
|
||||
if (!pluginAuth) {
|
||||
throw new Error(
|
||||
`No plugin auth ${authField} found for user ${userId} and plugin ${pluginKey}`,
|
||||
);
|
||||
}
|
||||
|
||||
const decryptedValue = await decrypt(pluginAuth.value);
|
||||
return decryptedValue;
|
||||
} catch (err) {
|
||||
if (!throwError) {
|
||||
return null;
|
||||
}
|
||||
logger.error('[getUserPluginAuthValueByPlugin]', err);
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
|
||||
// const updateUserPluginAuth = async (userId, authField, pluginKey, value) => {
|
||||
// try {
|
||||
// const encryptedValue = encrypt(value);
|
||||
@@ -151,7 +134,6 @@ const deleteUserPluginAuth = async (userId, authField, all = false, pluginKey) =
|
||||
|
||||
module.exports = {
|
||||
getUserPluginAuthValue,
|
||||
getUserPluginAuthValueByPlugin,
|
||||
updateUserPluginAuth,
|
||||
deleteUserPluginAuth,
|
||||
};
|
||||
|
||||
@@ -10,15 +10,6 @@ const { getLogStores } = require('~/cache');
|
||||
* @param {import('express').Application} app - Express app instance
|
||||
*/
|
||||
async function initializeMCPs(app) {
|
||||
// TEMPORARY: Reset all OAuth tokens for fresh testing
|
||||
try {
|
||||
logger.info('[MCP] Resetting all OAuth tokens for fresh testing...');
|
||||
await deleteTokens({});
|
||||
logger.info('[MCP] All OAuth tokens reset successfully');
|
||||
} catch (error) {
|
||||
logger.error('[MCP] Error resetting OAuth tokens:', error);
|
||||
}
|
||||
|
||||
const mcpServers = app.locals.mcpConfig;
|
||||
if (!mcpServers) {
|
||||
return;
|
||||
@@ -45,7 +36,7 @@ async function initializeMCPs(app) {
|
||||
const flowManager = flowsCache ? getFlowStateManager(flowsCache) : null;
|
||||
|
||||
try {
|
||||
const oauthRequirements = await mcpManager.initializeMCPs({
|
||||
await mcpManager.initializeMCPs({
|
||||
mcpServers: filteredServers,
|
||||
flowManager,
|
||||
tokenMethods: {
|
||||
@@ -73,9 +64,6 @@ async function initializeMCPs(app) {
|
||||
logger.debug('Cleared tools array cache after MCP initialization');
|
||||
|
||||
logger.info('MCP servers initialized successfully');
|
||||
|
||||
// Store OAuth requirement information in app locals for client access
|
||||
app.locals.mcpOAuthRequirements = oauthRequirements;
|
||||
} catch (error) {
|
||||
logger.error('Failed to initialize MCP servers:', error);
|
||||
}
|
||||
|
||||
@@ -52,6 +52,11 @@ function assistantsConfigSetup(config, assistantsEndpoint, prevConfig = {}) {
|
||||
privateAssistants: parsedConfig.privateAssistants,
|
||||
timeoutMs: parsedConfig.timeoutMs,
|
||||
streamRate: parsedConfig.streamRate,
|
||||
titlePrompt: parsedConfig.titlePrompt,
|
||||
titleMethod: parsedConfig.titleMethod,
|
||||
titleModel: parsedConfig.titleModel,
|
||||
titleEndpoint: parsedConfig.titleEndpoint,
|
||||
titlePromptTemplate: parsedConfig.titlePromptTemplate,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
const { webSearchKeys } = require('@librechat/api');
|
||||
const {
|
||||
Constants,
|
||||
webSearchKeys,
|
||||
deprecatedAzureVariables,
|
||||
conflictingAzureVariables,
|
||||
extractVariableName,
|
||||
|
||||
@@ -50,6 +50,7 @@ async function loadDefaultInterface(config, configDefaults, roleName = SystemRol
|
||||
temporaryChat: interfaceConfig?.temporaryChat ?? defaults.temporaryChat,
|
||||
runCode: interfaceConfig?.runCode ?? defaults.runCode,
|
||||
webSearch: interfaceConfig?.webSearch ?? defaults.webSearch,
|
||||
fileSearch: interfaceConfig?.fileSearch ?? defaults.fileSearch,
|
||||
customWelcome: interfaceConfig?.customWelcome ?? defaults.customWelcome,
|
||||
});
|
||||
|
||||
@@ -65,6 +66,7 @@ async function loadDefaultInterface(config, configDefaults, roleName = SystemRol
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: loadedInterface.temporaryChat },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: loadedInterface.runCode },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: loadedInterface.webSearch },
|
||||
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: loadedInterface.fileSearch },
|
||||
});
|
||||
await updateAccessPermissions(SystemRoles.ADMIN, {
|
||||
[PermissionTypes.PROMPTS]: { [Permissions.USE]: loadedInterface.prompts },
|
||||
@@ -78,6 +80,7 @@ async function loadDefaultInterface(config, configDefaults, roleName = SystemRol
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: loadedInterface.temporaryChat },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: loadedInterface.runCode },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: loadedInterface.webSearch },
|
||||
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: loadedInterface.fileSearch },
|
||||
});
|
||||
|
||||
let i = 0;
|
||||
|
||||
@@ -18,6 +18,7 @@ describe('loadDefaultInterface', () => {
|
||||
temporaryChat: true,
|
||||
runCode: true,
|
||||
webSearch: true,
|
||||
fileSearch: true,
|
||||
},
|
||||
};
|
||||
const configDefaults = { interface: {} };
|
||||
@@ -27,12 +28,13 @@ describe('loadDefaultInterface', () => {
|
||||
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
|
||||
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.MEMORIES]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.MEMORIES]: { [Permissions.USE]: true, [Permissions.OPT_OUT]: undefined },
|
||||
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: true },
|
||||
});
|
||||
});
|
||||
|
||||
@@ -47,6 +49,7 @@ describe('loadDefaultInterface', () => {
|
||||
temporaryChat: false,
|
||||
runCode: false,
|
||||
webSearch: false,
|
||||
fileSearch: false,
|
||||
},
|
||||
};
|
||||
const configDefaults = { interface: {} };
|
||||
@@ -56,12 +59,13 @@ describe('loadDefaultInterface', () => {
|
||||
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
|
||||
[PermissionTypes.PROMPTS]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.MEMORIES]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.MEMORIES]: { [Permissions.USE]: false, [Permissions.OPT_OUT]: undefined },
|
||||
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: false },
|
||||
});
|
||||
});
|
||||
|
||||
@@ -74,12 +78,16 @@ describe('loadDefaultInterface', () => {
|
||||
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
|
||||
[PermissionTypes.PROMPTS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.MEMORIES]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.MEMORIES]: {
|
||||
[Permissions.USE]: undefined,
|
||||
[Permissions.OPT_OUT]: undefined,
|
||||
},
|
||||
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: undefined },
|
||||
});
|
||||
});
|
||||
|
||||
@@ -94,6 +102,7 @@ describe('loadDefaultInterface', () => {
|
||||
temporaryChat: undefined,
|
||||
runCode: undefined,
|
||||
webSearch: undefined,
|
||||
fileSearch: undefined,
|
||||
},
|
||||
};
|
||||
const configDefaults = { interface: {} };
|
||||
@@ -103,12 +112,16 @@ describe('loadDefaultInterface', () => {
|
||||
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
|
||||
[PermissionTypes.PROMPTS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.MEMORIES]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.MEMORIES]: {
|
||||
[Permissions.USE]: undefined,
|
||||
[Permissions.OPT_OUT]: undefined,
|
||||
},
|
||||
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: undefined },
|
||||
});
|
||||
});
|
||||
|
||||
@@ -123,6 +136,7 @@ describe('loadDefaultInterface', () => {
|
||||
temporaryChat: undefined,
|
||||
runCode: false,
|
||||
webSearch: true,
|
||||
fileSearch: false,
|
||||
},
|
||||
};
|
||||
const configDefaults = { interface: {} };
|
||||
@@ -132,12 +146,13 @@ describe('loadDefaultInterface', () => {
|
||||
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
|
||||
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.MEMORIES]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.MEMORIES]: { [Permissions.USE]: true, [Permissions.OPT_OUT]: undefined },
|
||||
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: false },
|
||||
});
|
||||
});
|
||||
|
||||
@@ -153,6 +168,7 @@ describe('loadDefaultInterface', () => {
|
||||
temporaryChat: true,
|
||||
runCode: true,
|
||||
webSearch: true,
|
||||
fileSearch: true,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -161,12 +177,13 @@ describe('loadDefaultInterface', () => {
|
||||
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
|
||||
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.MEMORIES]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.MEMORIES]: { [Permissions.USE]: true, [Permissions.OPT_OUT]: undefined },
|
||||
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: true },
|
||||
});
|
||||
});
|
||||
|
||||
@@ -179,12 +196,16 @@ describe('loadDefaultInterface', () => {
|
||||
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
|
||||
[PermissionTypes.PROMPTS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.MEMORIES]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.MEMORIES]: {
|
||||
[Permissions.USE]: undefined,
|
||||
[Permissions.OPT_OUT]: undefined,
|
||||
},
|
||||
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: undefined },
|
||||
});
|
||||
});
|
||||
|
||||
@@ -197,12 +218,16 @@ describe('loadDefaultInterface', () => {
|
||||
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
|
||||
[PermissionTypes.PROMPTS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.MEMORIES]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.MEMORIES]: {
|
||||
[Permissions.USE]: undefined,
|
||||
[Permissions.OPT_OUT]: undefined,
|
||||
},
|
||||
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: undefined },
|
||||
});
|
||||
});
|
||||
|
||||
@@ -215,12 +240,16 @@ describe('loadDefaultInterface', () => {
|
||||
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
|
||||
[PermissionTypes.PROMPTS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.MEMORIES]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.MEMORIES]: {
|
||||
[Permissions.USE]: undefined,
|
||||
[Permissions.OPT_OUT]: undefined,
|
||||
},
|
||||
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: undefined },
|
||||
});
|
||||
});
|
||||
|
||||
@@ -234,6 +263,7 @@ describe('loadDefaultInterface', () => {
|
||||
agents: false,
|
||||
temporaryChat: true,
|
||||
runCode: false,
|
||||
fileSearch: true,
|
||||
},
|
||||
};
|
||||
const configDefaults = { interface: {} };
|
||||
@@ -243,12 +273,13 @@ describe('loadDefaultInterface', () => {
|
||||
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
|
||||
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.MEMORIES]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.MEMORIES]: { [Permissions.USE]: true, [Permissions.OPT_OUT]: undefined },
|
||||
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: true },
|
||||
});
|
||||
});
|
||||
|
||||
@@ -264,6 +295,7 @@ describe('loadDefaultInterface', () => {
|
||||
temporaryChat: undefined,
|
||||
runCode: undefined,
|
||||
webSearch: undefined,
|
||||
fileSearch: true,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -272,12 +304,13 @@ describe('loadDefaultInterface', () => {
|
||||
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
|
||||
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.MEMORIES]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.MEMORIES]: { [Permissions.USE]: false, [Permissions.OPT_OUT]: undefined },
|
||||
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: true },
|
||||
});
|
||||
});
|
||||
|
||||
@@ -300,12 +333,90 @@ describe('loadDefaultInterface', () => {
|
||||
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
|
||||
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.MEMORIES]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.MEMORIES]: { [Permissions.USE]: true, [Permissions.OPT_OUT]: undefined },
|
||||
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: undefined },
|
||||
});
|
||||
});
|
||||
|
||||
it('should call updateAccessPermissions with the correct parameters when FILE_SEARCH is true', async () => {
|
||||
const config = {
|
||||
interface: {
|
||||
fileSearch: true,
|
||||
},
|
||||
};
|
||||
const configDefaults = { interface: {} };
|
||||
|
||||
await loadDefaultInterface(config, configDefaults);
|
||||
|
||||
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
|
||||
[PermissionTypes.PROMPTS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.MEMORIES]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: true },
|
||||
});
|
||||
});
|
||||
|
||||
it('should call updateAccessPermissions with false when FILE_SEARCH is false', async () => {
|
||||
const config = {
|
||||
interface: {
|
||||
fileSearch: false,
|
||||
},
|
||||
};
|
||||
const configDefaults = { interface: {} };
|
||||
|
||||
await loadDefaultInterface(config, configDefaults);
|
||||
|
||||
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
|
||||
[PermissionTypes.PROMPTS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.MEMORIES]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
|
||||
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: false },
|
||||
});
|
||||
});
|
||||
|
||||
it('should call updateAccessPermissions with all interface options including fileSearch', async () => {
|
||||
const config = {
|
||||
interface: {
|
||||
prompts: true,
|
||||
bookmarks: false,
|
||||
memories: true,
|
||||
multiConvo: true,
|
||||
agents: false,
|
||||
temporaryChat: true,
|
||||
runCode: false,
|
||||
webSearch: true,
|
||||
fileSearch: true,
|
||||
},
|
||||
};
|
||||
const configDefaults = { interface: {} };
|
||||
|
||||
await loadDefaultInterface(config, configDefaults);
|
||||
|
||||
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
|
||||
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.MEMORIES]: { [Permissions.USE]: true, [Permissions.OPT_OUT]: undefined },
|
||||
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.AGENTS]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: false },
|
||||
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: true },
|
||||
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: true },
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -10,3 +10,4 @@ process.env.JWT_SECRET = 'test';
|
||||
process.env.JWT_REFRESH_SECRET = 'test';
|
||||
process.env.CREDS_KEY = 'test';
|
||||
process.env.CREDS_IV = 'test';
|
||||
process.env.OPENAI_API_KEY = 'test';
|
||||
|
||||
@@ -226,7 +226,14 @@ const xAIModels = {
|
||||
'grok-4': 256000, // 256K context
|
||||
};
|
||||
|
||||
const aggregateModels = { ...openAIModels, ...googleModels, ...bedrockModels, ...xAIModels };
|
||||
const aggregateModels = {
|
||||
...openAIModels,
|
||||
...googleModels,
|
||||
...bedrockModels,
|
||||
...xAIModels,
|
||||
// misc.
|
||||
kimi: 131000,
|
||||
};
|
||||
|
||||
const maxTokensMap = {
|
||||
[EModelEndpoint.azureOpenAI]: openAIModels,
|
||||
|
||||
@@ -714,3 +714,45 @@ describe('Claude Model Tests', () => {
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Kimi Model Tests', () => {
|
||||
describe('getModelMaxTokens', () => {
|
||||
test('should return correct tokens for Kimi models', () => {
|
||||
expect(getModelMaxTokens('kimi')).toBe(131000);
|
||||
expect(getModelMaxTokens('kimi-k2')).toBe(131000);
|
||||
expect(getModelMaxTokens('kimi-vl')).toBe(131000);
|
||||
});
|
||||
|
||||
test('should return correct tokens for Kimi models with provider prefix', () => {
|
||||
expect(getModelMaxTokens('moonshotai/kimi-k2')).toBe(131000);
|
||||
expect(getModelMaxTokens('moonshotai/kimi')).toBe(131000);
|
||||
expect(getModelMaxTokens('moonshotai/kimi-vl')).toBe(131000);
|
||||
});
|
||||
|
||||
test('should handle partial matches for Kimi models', () => {
|
||||
expect(getModelMaxTokens('kimi-k2-latest')).toBe(131000);
|
||||
expect(getModelMaxTokens('kimi-vl-preview')).toBe(131000);
|
||||
expect(getModelMaxTokens('kimi-2024')).toBe(131000);
|
||||
});
|
||||
});
|
||||
|
||||
describe('matchModelName', () => {
|
||||
test('should match exact Kimi model names', () => {
|
||||
expect(matchModelName('kimi')).toBe('kimi');
|
||||
expect(matchModelName('kimi-k2')).toBe('kimi');
|
||||
expect(matchModelName('kimi-vl')).toBe('kimi');
|
||||
});
|
||||
|
||||
test('should match Kimi model variations with provider prefix', () => {
|
||||
expect(matchModelName('moonshotai/kimi')).toBe('kimi');
|
||||
expect(matchModelName('moonshotai/kimi-k2')).toBe('kimi');
|
||||
expect(matchModelName('moonshotai/kimi-vl')).toBe('kimi');
|
||||
});
|
||||
|
||||
test('should match Kimi model variations with suffixes', () => {
|
||||
expect(matchModelName('kimi-k2-latest')).toBe('kimi');
|
||||
expect(matchModelName('kimi-vl-preview')).toBe('kimi');
|
||||
expect(matchModelName('kimi-2024')).toBe('kimi');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@librechat/frontend",
|
||||
"version": "v0.7.9-rc1",
|
||||
"version": "v0.7.9",
|
||||
"description": "",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
@@ -34,6 +34,7 @@
|
||||
"@dicebear/collection": "^9.2.2",
|
||||
"@dicebear/core": "^9.2.2",
|
||||
"@headlessui/react": "^2.1.2",
|
||||
"@librechat/client": "*",
|
||||
"@marsidev/react-turnstile": "^1.1.0",
|
||||
"@radix-ui/react-accordion": "^1.1.2",
|
||||
"@radix-ui/react-alert-dialog": "^1.0.2",
|
||||
@@ -70,6 +71,7 @@
|
||||
"i18next": "^24.2.2",
|
||||
"i18next-browser-languagedetector": "^8.0.3",
|
||||
"input-otp": "^1.4.2",
|
||||
"jotai": "^2.12.5",
|
||||
"js-cookie": "^3.0.5",
|
||||
"librechat-data-provider": "*",
|
||||
"lodash": "^4.17.21",
|
||||
|
||||
@@ -4,10 +4,9 @@ import { RouterProvider } from 'react-router-dom';
|
||||
import * as RadixToast from '@radix-ui/react-toast';
|
||||
import { HTML5Backend } from 'react-dnd-html5-backend';
|
||||
import { ReactQueryDevtools } from '@tanstack/react-query-devtools';
|
||||
import { Toast, ThemeProvider, ToastProvider } from '@librechat/client';
|
||||
import { QueryClient, QueryClientProvider, QueryCache } from '@tanstack/react-query';
|
||||
import { ScreenshotProvider, ThemeProvider, useApiErrorBoundary } from './hooks';
|
||||
import { ToastProvider } from './Providers';
|
||||
import Toast from './components/ui/Toast';
|
||||
import { ScreenshotProvider, useApiErrorBoundary } from './hooks';
|
||||
import { LiveAnnouncer } from '~/a11y';
|
||||
import { router } from './routes';
|
||||
|
||||
|
||||
31
client/src/Providers/SidePanelContext.tsx
Normal file
31
client/src/Providers/SidePanelContext.tsx
Normal file
@@ -0,0 +1,31 @@
|
||||
import React, { createContext, useContext, useMemo } from 'react';
|
||||
import type { EModelEndpoint } from 'librechat-data-provider';
|
||||
import { useChatContext } from './ChatContext';
|
||||
|
||||
interface SidePanelContextValue {
|
||||
endpoint?: EModelEndpoint | null;
|
||||
}
|
||||
|
||||
const SidePanelContext = createContext<SidePanelContextValue | undefined>(undefined);
|
||||
|
||||
export function SidePanelProvider({ children }: { children: React.ReactNode }) {
|
||||
const { conversation } = useChatContext();
|
||||
|
||||
/** Context value only created when endpoint changes */
|
||||
const contextValue = useMemo<SidePanelContextValue>(
|
||||
() => ({
|
||||
endpoint: conversation?.endpoint,
|
||||
}),
|
||||
[conversation?.endpoint],
|
||||
);
|
||||
|
||||
return <SidePanelContext.Provider value={contextValue}>{children}</SidePanelContext.Provider>;
|
||||
}
|
||||
|
||||
export function useSidePanelContext() {
|
||||
const context = useContext(SidePanelContext);
|
||||
if (!context) {
|
||||
throw new Error('useSidePanelContext must be used within SidePanelProvider');
|
||||
}
|
||||
return context;
|
||||
}
|
||||
@@ -1,11 +1,9 @@
|
||||
export { default as AssistantsProvider } from './AssistantsContext';
|
||||
export { default as AgentsProvider } from './AgentsContext';
|
||||
export { default as ToastProvider } from './ToastContext';
|
||||
export * from './ActivePanelContext';
|
||||
export * from './AgentPanelContext';
|
||||
export * from './ChatContext';
|
||||
export * from './ShareContext';
|
||||
export * from './ToastContext';
|
||||
export * from './FileMapContext';
|
||||
export * from './AddedChatContext';
|
||||
export * from './EditorContext';
|
||||
@@ -24,4 +22,5 @@ export * from './ToolCallsMapContext';
|
||||
export * from './SetConvoContext';
|
||||
export * from './SearchContext';
|
||||
export * from './BadgeRowContext';
|
||||
export * from './SidePanelContext';
|
||||
export { default as BadgeRowProvider } from './BadgeRowContext';
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
// client/src/a11y/LiveAnnouncer.tsx
|
||||
import React, { useState, useCallback, useRef, useEffect, useMemo } from 'react';
|
||||
import type { AnnounceOptions } from '~/common';
|
||||
import AnnouncerContext from '~/Providers/AnnouncerContext';
|
||||
import useLocalize from '~/hooks/useLocalize';
|
||||
import { useLocalize } from '~/hooks';
|
||||
import Announcer from './Announcer';
|
||||
|
||||
interface LiveAnnouncerProps {
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
export type RenderProp<
|
||||
P = React.HTMLAttributes<any> & {
|
||||
ref?: React.Ref<any>;
|
||||
|
||||
@@ -206,7 +206,9 @@ export type AgentPanelProps = {
|
||||
setActivePanel: React.Dispatch<React.SetStateAction<Panel>>;
|
||||
setMcp: React.Dispatch<React.SetStateAction<t.MCP | undefined>>;
|
||||
setAction: React.Dispatch<React.SetStateAction<t.Action | undefined>>;
|
||||
endpointsConfig?: t.TEndpointsConfig;
|
||||
setCurrentAgentId: React.Dispatch<React.SetStateAction<string | undefined>>;
|
||||
agentsConfig?: t.TAgentsEndpoint | null;
|
||||
};
|
||||
|
||||
export type AgentPanelContextType = {
|
||||
@@ -217,14 +219,12 @@ export type AgentPanelContextType = {
|
||||
mcps?: t.MCP[];
|
||||
setMcp: React.Dispatch<React.SetStateAction<t.MCP | undefined>>;
|
||||
setMcps: React.Dispatch<React.SetStateAction<t.MCP[] | undefined>>;
|
||||
groupedTools: Record<string, t.AgentToolType & { tools?: t.AgentToolType[] }>;
|
||||
tools: t.AgentToolType[];
|
||||
activePanel?: string;
|
||||
setActivePanel: React.Dispatch<React.SetStateAction<Panel>>;
|
||||
setCurrentAgentId: React.Dispatch<React.SetStateAction<string | undefined>>;
|
||||
groupedTools?: Record<string, t.AgentToolType & { tools?: t.AgentToolType[] }>;
|
||||
agent_id?: string;
|
||||
agentsConfig?: t.TAgentsEndpoint | null;
|
||||
endpointsConfig?: t.TEndpointsConfig | null;
|
||||
};
|
||||
|
||||
export type AgentModelPanelProps = {
|
||||
@@ -336,16 +336,13 @@ export type TAskProps = {
|
||||
export type TOptions = {
|
||||
editedMessageId?: string | null;
|
||||
editedText?: string | null;
|
||||
editedContent?: {
|
||||
index: number;
|
||||
text: string;
|
||||
type: 'text' | 'think';
|
||||
};
|
||||
isRegenerate?: boolean;
|
||||
isContinued?: boolean;
|
||||
isEdited?: boolean;
|
||||
overrideMessages?: t.TMessage[];
|
||||
/** Currently only utilized when resubmitting user-created message, uses that message's currently attached files */
|
||||
/** This value is only true when the user submits a message with "Save & Submit" for a user-created message */
|
||||
isResubmission?: boolean;
|
||||
/** Currently only utilized when `isResubmission === true`, uses that message's currently attached files */
|
||||
overrideFiles?: t.TMessage['files'];
|
||||
};
|
||||
|
||||
|
||||
@@ -6,9 +6,9 @@ import type { SandpackPreviewRef, CodeEditorRef } from '@codesandbox/sandpack-re
import useArtifacts from '~/hooks/Artifacts/useArtifacts';
import DownloadArtifact from './DownloadArtifact';
import { useEditorContext } from '~/Providers';
import useLocalize from '~/hooks/useLocalize';
import ArtifactTabs from './ArtifactTabs';
import { CopyCodeButton } from './Code';
import { useLocalize } from '~/hooks';
import store from '~/store';

export default function Artifacts() {

@@ -1,12 +1,11 @@
import React, { memo, useEffect, useRef, useState } from 'react';
import copy from 'copy-to-clipboard';
import rehypeKatex from 'rehype-katex';
import ReactMarkdown from 'react-markdown';
import rehypeHighlight from 'rehype-highlight';
import copy from 'copy-to-clipboard';
import { Clipboard, CheckMark } from '@librechat/client';
import { handleDoubleClick, langSubset } from '~/utils';
import Clipboard from '~/components/svg/Clipboard';
import CheckMark from '~/components/svg/CheckMark';
import useLocalize from '~/hooks/useLocalize';
import { useLocalize } from '~/hooks';

type TCodeProps = {
  inline: boolean;

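The recurring change in this and most of the following hunks is the same import migration: shared UI primitives that previously lived under `~/components/svg` and `~/components/ui` become named exports of the `@librechat/client` package, and `useLocalize` moves from a default import of `~/hooks/useLocalize` to a named import from `~/hooks`. A minimal before/after sketch of the pattern, with module and symbol names taken from the hunk above:

// before
import Clipboard from '~/components/svg/Clipboard';
import CheckMark from '~/components/svg/CheckMark';
import useLocalize from '~/hooks/useLocalize';

// after
import { Clipboard, CheckMark } from '@librechat/client';
import { useLocalize } from '~/hooks';
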
@@ -1,9 +1,9 @@
import React, { useState } from 'react';
import { Download } from 'lucide-react';
import type { Artifact } from '~/common';
import { CheckMark } from '@librechat/client';
import useArtifactProps from '~/hooks/Artifacts/useArtifactProps';
import { useEditorContext } from '~/Providers';
import { CheckMark } from '~/components/svg';
import { useLocalize } from '~/hooks';

const DownloadArtifact = ({

@@ -1,8 +1,7 @@
import React, { useEffect, useRef, useState } from 'react';
import mermaid from 'mermaid';
import { Button } from '@librechat/client';
import { TransformWrapper, TransformComponent, ReactZoomPanPinchRef } from 'react-zoom-pan-pinch';
// import { Button } from '/components/ui/Button'; // Live component
import { Button } from '~/components/ui/Button';
import { ZoomIn, ZoomOut, RefreshCw } from 'lucide-react';

interface MermaidDiagramProps {

@@ -2,8 +2,8 @@
import { useEffect } from 'react';
import { useRecoilValue } from 'recoil';
import type { TMessageAudio } from '~/common';
import { VolumeIcon, VolumeMuteIcon, Spinner } from '@librechat/client';
import { useLocalize, useTTSBrowser, useTTSExternal } from '~/hooks';
import { VolumeIcon, VolumeMuteIcon, Spinner } from '~/components';
import { logger } from '~/utils';
import store from '~/store';

@@ -1,8 +1,8 @@
import React from 'react';
import { useRecoilState } from 'recoil';
import { Dropdown } from '@librechat/client';
import type { Option } from '~/common';
import { useLocalize, useTTSBrowser, useTTSExternal } from '~/hooks';
import { Dropdown } from '~/components/ui';
import { logger } from '~/utils';
import store from '~/store';

@@ -1,9 +1,9 @@
import { TranslationKeys, useLocalize } from '~/hooks';
import { ThemeSelector } from '@librechat/client';
import { TStartupConfig } from 'librechat-data-provider';
import { ErrorMessage } from '~/components/Auth/ErrorMessage';
import { TranslationKeys, useLocalize } from '~/hooks';
import SocialLoginRender from './SocialLoginRender';
import { BlinkAnimation } from './BlinkAnimation';
import { ThemeSelector } from '~/components';
import { Banner } from '../Banners';
import Footer from './Footer';

@@ -1,10 +1,10 @@
import { useOutletContext, useSearchParams } from 'react-router-dom';
import { useEffect, useState } from 'react';
import { useAuthContext } from '~/hooks/AuthContext';
import { OpenIDIcon } from '@librechat/client';
import type { TLoginLayoutContext } from '~/common';
import { ErrorMessage } from '~/components/Auth/ErrorMessage';
import SocialButton from '~/components/Auth/SocialButton';
import { OpenIDIcon } from '~/components';
import { useAuthContext } from '~/hooks/AuthContext';
import { getLoginError } from '~/utils';
import { useLocalize } from '~/hooks';
import LoginForm from './LoginForm';

@@ -1,11 +1,11 @@
import { useForm } from 'react-hook-form';
import React, { useState, useEffect, useContext } from 'react';
import { useForm } from 'react-hook-form';
import { Turnstile } from '@marsidev/react-turnstile';
import { ThemeContext, Spinner, Button } from '@librechat/client';
import type { TLoginUser, TStartupConfig } from 'librechat-data-provider';
import type { TAuthContext } from '~/common';
import { useResendVerificationEmail, useGetStartupConfig } from '~/data-provider';
import { ThemeContext, useLocalize } from '~/hooks';
import { Spinner, Button } from '~/components';
import { useLocalize } from '~/hooks';

type TLoginFormProps = {
  onSubmit: (data: TLoginUser) => void;

@@ -1,12 +1,12 @@
import { useForm } from 'react-hook-form';
import React, { useContext, useState } from 'react';
import { Turnstile } from '@marsidev/react-turnstile';
import { ThemeContext, Spinner, Button } from '@librechat/client';
import { useNavigate, useOutletContext, useLocation } from 'react-router-dom';
import { useRegisterUserMutation } from 'librechat-data-provider/react-query';
import type { TRegisterUser, TError } from 'librechat-data-provider';
import { useLocalize, TranslationKeys, ThemeContext } from '~/hooks';
import type { TLoginLayoutContext } from '~/common';
import { Spinner, Button } from '~/components';
import { useLocalize, TranslationKeys } from '~/hooks';
import { ErrorMessage } from './ErrorMessage';

const Registration: React.FC = () => {

@@ -1,11 +1,11 @@
import { useForm } from 'react-hook-form';
import { useState, ReactNode } from 'react';
import { Spinner, Button } from '@librechat/client';
import { useOutletContext } from 'react-router-dom';
import { useRequestPasswordResetMutation } from 'librechat-data-provider/react-query';
import type { TRequestPasswordReset, TRequestPasswordResetResponse } from 'librechat-data-provider';
import type { FC } from 'react';
import type { TLoginLayoutContext } from '~/common';
import { Spinner, Button } from '~/components';
import type { FC } from 'react';
import { useLocalize } from '~/hooks';

const BodyTextWrapper: FC<{ children: ReactNode }> = ({ children }) => {

@@ -1,10 +1,10 @@
import { useForm } from 'react-hook-form';
import { Spinner, Button } from '@librechat/client';
import { useOutletContext } from 'react-router-dom';
import { useNavigate, useSearchParams } from 'react-router-dom';
import { useResetPasswordMutation } from 'librechat-data-provider/react-query';
import type { TResetPassword } from 'librechat-data-provider';
import type { TLoginLayoutContext } from '~/common';
import { Spinner, Button } from '~/components';
import { useLocalize } from '~/hooks';

function ResetPassword() {

@@ -6,7 +6,7 @@ import {
  DiscordIcon,
  AppleIcon,
  SamlIcon,
} from '~/components';
} from '@librechat/client';

import SocialButton from './SocialButton';

@@ -1,10 +1,10 @@
import React, { useState, useCallback } from 'react';
import { useSearchParams } from 'react-router-dom';
import { useToastContext } from '@librechat/client';
import { useForm, Controller } from 'react-hook-form';
import { REGEXP_ONLY_DIGITS, REGEXP_ONLY_DIGITS_AND_CHARS } from 'input-otp';
import { InputOTP, InputOTPGroup, InputOTPSeparator, InputOTPSlot, Label } from '~/components';
import { InputOTP, InputOTPGroup, InputOTPSeparator, InputOTPSlot, Label } from '@librechat/client';
import { useVerifyTwoFactorTempMutation } from '~/data-provider';
import { useToastContext } from '~/Providers';
import { useLocalize } from '~/hooks';

interface VerifyPayload {

@@ -1,8 +1,7 @@
import { useSearchParams, useNavigate } from 'react-router-dom';
import { useState, useEffect, useMemo, useCallback } from 'react';
import { Spinner, ThemeSelector } from '@librechat/client';
import { useSearchParams, useNavigate } from 'react-router-dom';
import { useVerifyEmailMutation, useResendVerificationEmail } from '~/data-provider';
import { ThemeSelector } from '~/components/ui';
import { Spinner } from '~/components/svg';
import { useLocalize } from '~/hooks';

function RequestPasswordReset() {

@@ -1,10 +1,8 @@
import React, { useRef, Dispatch, SetStateAction } from 'react';
import { TConversationTag } from 'librechat-data-provider';
import OGDialogTemplate from '~/components/ui/OGDialogTemplate';
import { OGDialogTemplate, OGDialog, Button, Spinner, useToastContext } from '@librechat/client';
import { useConversationTagMutation } from '~/data-provider';
import { OGDialog, Button, Spinner } from '~/components';
import { NotificationSeverity } from '~/common';
import { useToastContext } from '~/Providers';
import BookmarkForm from './BookmarkForm';
import { useLocalize } from '~/hooks';
import { logger } from '~/utils';

@@ -2,11 +2,10 @@ import React, { useEffect } from 'react';
import { QueryKeys } from 'librechat-data-provider';
import { Controller, useForm } from 'react-hook-form';
import { useQueryClient } from '@tanstack/react-query';
import { Checkbox, Label, TextareaAutosize, Input, useToastContext } from '@librechat/client';
import type { TConversationTag, TConversationTagRequest } from 'librechat-data-provider';
import { Checkbox, Label, TextareaAutosize, Input } from '~/components';
import { useBookmarkContext } from '~/Providers/BookmarkContext';
import { useConversationTagMutation } from '~/data-provider';
import { useToastContext } from '~/Providers';
import { useLocalize } from '~/hooks';
import { cn, logger } from '~/utils';

@@ -1,8 +1,8 @@
import { useState } from 'react';
import { Spinner } from '@librechat/client';
import { MenuItem } from '@headlessui/react';
import { BookmarkFilledIcon, BookmarkIcon } from '@radix-ui/react-icons';
import type { FC } from 'react';
import { Spinner } from '~/components/svg';

type MenuItemProps = {
  tag: string | React.ReactNode;

@@ -1,10 +1,17 @@
import { useCallback, useState } from 'react';
import {
  Button,
  TrashIcon,
  Label,
  OGDialog,
  OGDialogTrigger,
  TooltipAnchor,
  OGDialogTemplate,
  useToastContext,
} from '@librechat/client';
import type { FC } from 'react';
import { Button, TrashIcon, Label, OGDialog, OGDialogTrigger, TooltipAnchor } from '~/components';
import { useDeleteConversationTagMutation } from '~/data-provider';
import OGDialogTemplate from '~/components/ui/OGDialogTemplate';
import { NotificationSeverity } from '~/common';
import { useToastContext } from '~/Providers';
import { useLocalize } from '~/hooks';

const DeleteBookmarkButton: FC<{

@@ -1,7 +1,7 @@
import { useState } from 'react';
import type { FC } from 'react';
import { TooltipAnchor, OGDialogTrigger, EditIcon, Button } from '@librechat/client';
import type { TConversationTag } from 'librechat-data-provider';
import { TooltipAnchor, OGDialogTrigger, EditIcon, Button } from '~/components';
import type { FC } from 'react';
import BookmarkEditDialog from './BookmarkEditDialog';
import { useLocalize } from '~/hooks';

@@ -1,8 +1,8 @@
import { PlusCircle } from 'lucide-react';
import { TooltipAnchor } from '@librechat/client';
import { isAssistantsEndpoint } from 'librechat-data-provider';
import type { TConversation } from 'librechat-data-provider';
import { useChatContext, useAddedChatContext } from '~/Providers';
import { TooltipAnchor } from '~/components';
import { mainTextareaId } from '~/common';
import { useLocalize } from '~/hooks';

@@ -1,6 +1,7 @@
import { memo, useCallback } from 'react';
import { useRecoilValue } from 'recoil';
import { useForm } from 'react-hook-form';
import { Spinner } from '@librechat/client';
import { useParams } from 'react-router-dom';
import { Constants } from 'librechat-data-provider';
import type { TMessage } from 'librechat-data-provider';
@@ -10,7 +11,6 @@ import { useChatHelpers, useAddedResponse, useSSE } from '~/hooks';
import ConversationStarters from './Input/ConversationStarters';
import { useGetMessagesByConvoId } from '~/data-provider';
import MessagesView from './Messages/MessagesView';
import { Spinner } from '~/components/svg';
import Presentation from './Presentation';
import { buildTree, cn } from '~/utils';
import ChatForm from './Input/ChatForm';

@@ -2,11 +2,11 @@ import { useState, useId, useRef } from 'react';
import { useRecoilValue } from 'recoil';
import * as Ariakit from '@ariakit/react';
import { Upload, Share2 } from 'lucide-react';
import { DropdownPopup, TooltipAnchor, useMediaQuery } from '@librechat/client';
import type * as t from '~/common';
import ExportModal from '~/components/Nav/ExportConversation/ExportModal';
import { ShareButton } from '~/components/Conversations/ConvoOptions';
import { DropdownPopup, TooltipAnchor } from '~/components/ui';
import { useMediaQuery, useLocalize } from '~/hooks';
import { useLocalize } from '~/hooks';
import store from '~/store';

export default function ExportAndShareMenu({

@@ -1,4 +1,5 @@
import { useMemo } from 'react';
import { useMediaQuery } from '@librechat/client';
import { useOutletContext } from 'react-router-dom';
import { getConfigDefaults, PermissionTypes, Permissions } from 'librechat-data-provider';
import type { ContextType } from '~/common';
@@ -6,10 +7,10 @@ import ModelSelector from './Menus/Endpoints/ModelSelector';
import { PresetsMenu, HeaderNewChat, OpenSidebar } from './Menus';
import { useGetStartupConfig } from '~/data-provider';
import ExportAndShareMenu from './ExportAndShareMenu';
import { useMediaQuery, useHasAccess } from '~/hooks';
import BookmarkMenu from './Menus/BookmarkMenu';
import { TemporaryChat } from './TemporaryChat';
import AddMultiConvo from './AddMultiConvo';
import { useHasAccess } from '~/hooks';

const defaultInterface = getConfigDefaults().interface;

@@ -1,8 +1,8 @@
import React, { memo, useState, useCallback, useMemo } from 'react';
import * as Ariakit from '@ariakit/react';
import { CheckboxButton } from '@librechat/client';
import { ArtifactModes } from 'librechat-data-provider';
import { WandSparkles, ChevronDown } from 'lucide-react';
import CheckboxButton from '~/components/ui/CheckboxButton';
import { useBadgeRowContext } from '~/Providers';
import { useLocalize } from '~/hooks';
import { cn } from '~/utils';

@@ -1,8 +1,8 @@
import React from 'react';
import * as Ariakit from '@ariakit/react';
import { PinIcon } from '@librechat/client';
import { ChevronRight, WandSparkles } from 'lucide-react';
import { ArtifactModes } from 'librechat-data-provider';
import { PinIcon } from '~/components/svg';
import { useLocalize } from '~/hooks';
import { cn } from '~/utils';

@@ -1,8 +1,7 @@
import { useCallback } from 'react';
import { useChatFormContext, useToastContext } from '~/Providers';
import { ListeningIcon, Spinner } from '~/components/svg';
import { useToastContext, TooltipAnchor, ListeningIcon, Spinner } from '@librechat/client';
import { useLocalize, useSpeechToText } from '~/hooks';
import { TooltipAnchor } from '~/components/ui';
import { useChatFormContext } from '~/Providers';
import { globalAudioId } from '~/common';
import { cn } from '~/utils';

@@ -8,6 +8,7 @@ import React, {
  useReducer,
  useCallback,
} from 'react';
import { Badge } from '@librechat/client';
import { useRecoilValue, useRecoilCallback } from 'recoil';
import type { LucideIcon } from 'lucide-react';
import CodeInterpreter from './CodeInterpreter';
@@ -15,7 +16,6 @@ import { BadgeRowProvider } from '~/Providers';
import ToolsDropdown from './ToolsDropdown';
import type { BadgeItem } from '~/common';
import { useChatBadges } from '~/hooks';
import { Badge } from '~/components/ui';
import ToolDialogs from './ToolDialogs';
import FileSearch from './FileSearch';
import Artifacts from './Artifacts';

@@ -1,5 +1,6 @@
import { memo, useRef, useMemo, useEffect, useState, useCallback } from 'react';
import { useWatch } from 'react-hook-form';
import { TextareaAutosize } from '@librechat/client';
import { useRecoilState, useRecoilValue } from 'recoil';
import { Constants, isAssistantsEndpoint, isAgentsEndpoint } from 'librechat-data-provider';
import {
@@ -20,7 +21,6 @@ import {
import { mainTextareaId, BadgeItem } from '~/common';
import AttachFileChat from './Files/AttachFileChat';
import FileFormChat from './Files/FileFormChat';
import { TextareaAutosize } from '~/components';
import { cn, removeFocusRings } from '~/utils';
import TextareaHeader from './TextareaHeader';
import PromptsCommand from './PromptsCommand';

@@ -1,6 +1,6 @@
import React from 'react';
import { CircleIcon, CircleDotsIcon } from '~/components/svg';
import { ECallState } from 'librechat-data-provider';
import { CircleIcon, CircleDotsIcon } from '@librechat/client';

const CircleRender = ({ rmsLevel, isCameraOn, state }) => {
  const getIconComponent = (state) => {

@@ -1,7 +1,7 @@
import React, { memo } from 'react';
import { TerminalSquareIcon } from 'lucide-react';
import { CheckboxButton } from '@librechat/client';
import { PermissionTypes, Permissions } from 'librechat-data-provider';
import CheckboxButton from '~/components/ui/CheckboxButton';
import { useLocalize, useHasAccess } from '~/hooks';
import { useBadgeRowContext } from '~/Providers';

@@ -1,6 +1,6 @@
import React from 'react';
import { TooltipAnchor } from '@librechat/client';
import { ChevronDown, ChevronUp } from 'lucide-react';
import { TooltipAnchor } from '~/components/ui';
import { useLocalize } from '~/hooks';
import { cn } from '~/utils';

@@ -1,9 +1,9 @@
import React, { useCallback } from 'react';
import { Edit3, Check, X } from 'lucide-react';
import { Button, Badge } from '@librechat/client';
import type { LucideIcon } from 'lucide-react';
import type { BadgeItem } from '~/common';
import { useChatBadges, useLocalize } from '~/hooks';
import { Button, Badge } from '~/components/ui';

interface EditBadgesProps {
  isEditingChatBadges: boolean;

@@ -1,14 +1,23 @@
import React, { memo } from 'react';
import CheckboxButton from '~/components/ui/CheckboxButton';
import { CheckboxButton, VectorIcon } from '@librechat/client';
import { PermissionTypes, Permissions } from 'librechat-data-provider';
import { useLocalize, useHasAccess } from '~/hooks';
import { useBadgeRowContext } from '~/Providers';
import { VectorIcon } from '~/components/svg';
import { useLocalize } from '~/hooks';

function FileSearch() {
  const localize = useLocalize();
  const { fileSearch } = useBadgeRowContext();
  const { toggleState: fileSearchEnabled, debouncedChange, isPinned } = fileSearch;

  const canUseFileSearch = useHasAccess({
    permissionType: PermissionTypes.FILE_SEARCH,
    permission: Permissions.USE,
  });

  if (!canUseFileSearch) {
    return null;
  }

  return (
    <>
      {(fileSearchEnabled || isPinned) && (

@@ -1,5 +1,5 @@
import React, { useRef } from 'react';
import { FileUpload, TooltipAnchor, AttachmentIcon } from '~/components';
import { FileUpload, TooltipAnchor, AttachmentIcon } from '@librechat/client';
import { useLocalize, useFileHandling } from '~/hooks';
import { cn } from '~/utils';

@@ -1,11 +1,11 @@
import { useSetRecoilState } from 'recoil';
import * as Ariakit from '@ariakit/react';
import React, { useRef, useState, useMemo } from 'react';
import * as Ariakit from '@ariakit/react';
import { useSetRecoilState } from 'recoil';
import { FileSearch, ImageUpIcon, TerminalSquareIcon, FileType2Icon } from 'lucide-react';
import { FileUpload, TooltipAnchor, DropdownPopup, AttachmentIcon } from '@librechat/client';
import { EToolResources, EModelEndpoint, defaultAgentCapabilities } from 'librechat-data-provider';
import type { EndpointFileConfig } from 'librechat-data-provider';
import { useLocalize, useGetAgentsConfig, useFileHandling, useAgentCapabilities } from '~/hooks';
import { FileUpload, TooltipAnchor, DropdownPopup, AttachmentIcon } from '~/components';
import { ephemeralAgentByConvoId } from '~/store';
import { cn } from '~/utils';

@@ -1,8 +1,8 @@
import React, { useMemo } from 'react';
import { OGDialog, OGDialogTemplate } from '@librechat/client';
import { ImageUpIcon, FileSearch, TerminalSquareIcon, FileType2Icon } from 'lucide-react';
import { EToolResources, defaultAgentCapabilities } from 'librechat-data-provider';
import { FileSearch, ImageUpIcon, FileType2Icon, TerminalSquareIcon } from 'lucide-react';
import { useLocalize, useGetAgentsConfig, useAgentCapabilities } from '~/hooks';
import { OGDialog, OGDialogTemplate } from '~/components/ui';

interface DragDropModalProps {
  onOptionSelect: (option: EToolResources | undefined) => void;

@@ -1,7 +1,7 @@
import { Spinner } from '@librechat/client';
import type { TFile } from 'librechat-data-provider';
import type { ExtendedFile } from '~/common';
import FileIcon from '~/components/svg/Files/FileIcon';
import { Spinner } from '~/components';
import { FileIcon } from '~/components/svg';
import SourceIcon from './SourceIcon';
import { cn } from '~/utils';

@@ -1,11 +1,11 @@
import { useEffect } from 'react';
import { useToastContext } from '@librechat/client';
import { EToolResources } from 'librechat-data-provider';
import type { ExtendedFile } from '~/common';
import { useDeleteFilesMutation } from '~/data-provider';
import { useToastContext } from '~/Providers';
import { useLocalize } from '~/hooks';
import { useFileDeletion } from '~/hooks/Files';
import FileContainer from './FileContainer';
import { useLocalize } from '~/hooks';
import { logger } from '~/utils';
import Image from './Image';

@@ -1,6 +1,6 @@
import { FileSources, FileContext } from 'librechat-data-provider';
import type { TFile } from 'librechat-data-provider';
import { OGDialog, OGDialogContent, OGDialogHeader, OGDialogTitle } from '~/components';
import { OGDialog, OGDialogContent, OGDialogHeader, OGDialogTitle } from '@librechat/client';
import { useGetFiles } from '~/data-provider';
import { DataTable, columns } from './Table';
import { useLocalize } from '~/hooks';

@@ -1,7 +1,7 @@
import { useState, useEffect, useCallback } from 'react';
import { Maximize2 } from 'lucide-react';
import { OGDialog, OGDialogContent } from '~/components/ui';
import { FileSources } from 'librechat-data-provider';
import { OGDialog, OGDialogContent } from '@librechat/client';
import ProgressCircle from './ProgressCircle';
import SourceIcon from './SourceIcon';
import { cn } from '~/utils';
@@ -93,9 +93,9 @@ const ImagePreview = ({

const style: styleProps = imageUrl
? {
...baseStyle,
backgroundImage: `url(${imageUrl})`,
}
...baseStyle,
backgroundImage: `url(${imageUrl})`,
}
: baseStyle;

if (typeof style.backgroundImage !== 'string' || style.backgroundImage.length === 0) {

Some files were not shown because too many files have changed in this diff.