Compare commits: style/mark ... v0.8.0-rc3 (132 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | e1ad235f17 |  |
|  | 4a0b329e3e |  |
|  | a22359de5e |  |
|  | bbfe4002eb |  |
|  | 94426a3cae |  |
|  | e559f0f4dc |  |
|  | 15c9c7e1f4 |  |
|  | ac641e7cba |  |
|  | 1915d7b195 |  |
|  | c2f4b383f2 |  |
|  | 939af59950 |  |
|  | 7d08da1a8a |  |
|  | 543b617e1c |  |
|  | c827fdd10e |  |
|  | ac608ded46 |  |
|  | 0e00f357a6 |  |
|  | c465d7b732 |  |
|  | aba0a93d1d |  |
|  | a49b2b2833 |  |
|  | e0ebb7097e |  |
|  | 9a79635012 |  |
|  | ce19abc968 |  |
|  | 49cd3894aa |  |
|  | da4aa37493 |  |
|  | 5a14ee9c6a |  |
|  | 3394aa5030 |  |
|  | cee0579e0e |  |
|  | d6c173c94b |  |
|  | beff848a3f |  |
|  | b9bc3123d6 |  |
|  | 639c7ad6ad |  |
|  | 822e2310ce |  |
|  | 2a0a8f6beb |  |
|  | a6fd32a15a |  |
|  | 80a1a57fde |  |
|  | 3576391482 |  |
|  | 55557f7cc8 |  |
|  | d7d02766ea |  |
|  | 627f0bffe5 |  |
|  | 8d1d95371f |  |
|  | 8bcdc041b2 |  |
|  | 9b6395d955 |  |
|  | ad1503abdc |  |
|  | a6d7ebf22e |  |
|  | cebf140bce |  |
|  | cc0cf359a2 |  |
|  | 3547873bc4 |  |
|  | 50b7bd6643 |  |
|  | 81186312ef |  |
|  | 4ec7bcb60f |  |
|  | c78fd0fc83 |  |
|  | d711fc7852 |  |
|  | 6af7efd0f4 |  |
|  | d57e7aec73 |  |
|  | e4e25aaf2b |  |
|  | e8ddd279fd |  |
|  | b742c8c7f9 |  |
|  | 803ade8601 |  |
|  | dcd96c29c5 |  |
|  | 53c31b85d0 |  |
|  | d07c2b3475 |  |
|  | a434d28579 |  |
|  | d82a63642d |  |
|  | 9585db14ba |  |
|  | c191af6c9b |  |
|  | 39346d6b8e |  |
|  | 28d63dab71 |  |
|  | 49d1cefe71 |  |
|  | 0262c25989 |  |
|  | 90b037a67f |  |
|  | fc8fd489d6 |  |
|  | 81b32e400a |  |
|  | ae732b2ebc |  |
|  | 7e7e75714e |  |
|  | ff54cbffd9 |  |
|  | 74e029e78f |  |
|  | 75324e1c7e |  |
|  | 949682ef0f |  |
|  | 66bd419baa |  |
|  | aa42759ffd |  |
|  | 52e59e40be |  |
|  | a955097faf |  |
|  | b6413b06bc |  |
|  | e6cebdf2b6 |  |
|  | 3eb6debe6a |  |
|  | 8780a78165 |  |
|  | 9dbf153489 |  |
|  | 4799593e1a |  |
|  | a199b87478 |  |
|  | 007570b5c6 |  |
|  | 5943d5346c |  |
|  | 052e61b735 |  |
|  | 1ccac58403 |  |
|  | 04d74a7e07 |  |
|  | 0fdca8ddbd |  |
|  | c5ca621efd |  |
|  | 8cefa566da |  |
|  | 7e4c8a5d0d |  |
|  | edf33bedcb |  |
|  | 21e00168b1 |  |
|  | da3730b7d6 |  |
|  | 770c766d50 |  |
|  | 5eb6926464 |  |
|  | e478ae1c28 |  |
|  | 0c9284c8ae |  |
|  | 4eeadddfe6 |  |
|  | 9ca1847535 |  |
|  | 5d0bc95193 |  |
|  | e7d6100fe4 |  |
|  | 01a95229f2 |  |
|  | 0939250f07 |  |
|  | 7147bce3c3 |  |
|  | 486fe34a2b |  |
|  | 922f43f520 |  |
|  | e6fa01d514 |  |
|  | 8238fb49e0 |  |
|  | 430557676d |  |
|  | 8a5047c456 |  |
|  | c787515894 |  |
|  | d95d8032cc |  |
|  | b9f72f4869 |  |
|  | 429bb6653a |  |
|  | 47caafa8f8 |  |
|  | 8530594f37 |  |
|  | 0b071c06f6 |  |
|  | 1092392ed8 |  |
|  | 36c8947029 |  |
|  | 4175a3ea19 |  |
|  | 02dc71f4b7 |  |
|  | a6c99a3267 |  |
|  | fcefc6eedf |  |
|  | dfdafdbd09 |  |
.env.example (15 changes)

@@ -456,6 +456,8 @@ OPENID_REQUIRED_ROLE_PARAMETER_PATH=
 OPENID_USERNAME_CLAIM=
 # Set to determine which user info property returned from OpenID Provider to store as the User's name
 OPENID_NAME_CLAIM=
+# Optional audience parameter for OpenID authorization requests
+OPENID_AUDIENCE=
 
 OPENID_BUTTON_LABEL=
 OPENID_IMAGE_URL=

@@ -740,3 +742,16 @@ OPENWEATHER_API_KEY=
 # JINA_API_KEY=your_jina_api_key
 # or
 # COHERE_API_KEY=your_cohere_api_key
+
+#======================#
+# MCP Configuration #
+#======================#
+
+# Treat 401/403 responses as OAuth requirement when no oauth metadata found
+# MCP_OAUTH_ON_AUTH_ERROR=true
+
+# Timeout for OAuth detection requests in milliseconds
+# MCP_OAUTH_DETECTION_TIMEOUT=5000
+
+# Cache connection status checks for this many milliseconds to avoid expensive verification
+# MCP_CONNECTION_CHECK_TTL=60000
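The new MCP options above ship commented out with their defaults. A minimal sketch of reading them from a Node.js process, assuming the defaults shown in the diff (the config object shape is illustrative, not LibreChat's actual loader):

```js
// Illustrative reader for the new MCP env vars, using the documented defaults.
const mcpConfig = {
  // Treat 401/403 as an OAuth requirement when no oauth metadata is found
  oauthOnAuthError: (process.env.MCP_OAUTH_ON_AUTH_ERROR ?? 'true') === 'true',
  // OAuth detection timeout, in milliseconds
  oauthDetectionTimeout: parseInt(process.env.MCP_OAUTH_DETECTION_TIMEOUT ?? '5000', 10),
  // Connection status cache TTL, in milliseconds
  connectionCheckTTL: parseInt(process.env.MCP_CONNECTION_CHECK_TTL ?? '60000', 10),
};

console.log(mcpConfig);
```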
.github/CONTRIBUTING.md (4 changes)

@@ -147,7 +147,7 @@ Apply the following naming conventions to branches, labels, and other Git-relate
 ## 8. Module Import Conventions
 
 - `npm` packages first,
-  - from shortest line (top) to longest (bottom)
+  - from longest line (top) to shortest (bottom)
 
 - Followed by typescript types (pertains to data-provider and client workspaces)
   - longest line (top) to shortest (bottom)

@@ -157,6 +157,8 @@ Apply the following naming conventions to branches, labels, and other Git-relate
   - longest line (top) to shortest (bottom)
   - imports with alias `~` treated the same as relative import with respect to line length
 
+**Note:** ESLint will automatically enforce these import conventions when you run `npm run lint --fix` or through pre-commit hooks.
+
 ---
 
 Please ensure that you adapt this summary to fit the specific context and nuances of your project.
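To make the convention concrete, a hypothetical import block that follows it (identifiers borrowed from elsewhere in this changeset): npm packages first, longest line to shortest, then local `~`/relative imports, also longest to shortest.

```js
// Hypothetical example of the import ordering convention.
const { Tools, Constants, EToolResources } = require('librechat-data-provider');
const { logger } = require('@librechat/data-schemas');
const express = require('express');
const { getCachedTools } = require('~/server/services/Config');
const { getLogStores } = require('~/cache');
```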
.github/workflows/data-provider.yml (12 changes)

@@ -1,4 +1,4 @@
-name: Node.js Package
+name: Publish `librechat-data-provider` to NPM
 
 on:
   push:

@@ -6,6 +6,12 @@ on:
       - main
     paths:
       - 'packages/data-provider/package.json'
+  workflow_dispatch:
+    inputs:
+      reason:
+        description: 'Reason for manual trigger'
+        required: false
+        default: 'Manual publish requested'
 
 jobs:
   build:

@@ -14,7 +20,7 @@ jobs:
       - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
-          node-version: 16
+          node-version: 20
      - run: cd packages/data-provider && npm ci
      - run: cd packages/data-provider && npm run build

@@ -25,7 +31,7 @@ jobs:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
-          node-version: 16
+          node-version: 20
          registry-url: 'https://registry.npmjs.org'
      - run: cd packages/data-provider && npm ci
      - run: cd packages/data-provider && npm run build
.github/workflows/helmcharts.yml (53 changes)

@@ -4,12 +4,13 @@ name: Build Helm Charts on Tag
 on:
   push:
     tags:
-      - "*"
+      - "chart-*"
 
 jobs:
   release:
     permissions:
       contents: write
+      packages: write
     runs-on: ubuntu-latest
     steps:
       - name: Checkout

@@ -26,15 +27,49 @@ jobs:
        uses: azure/setup-helm@v4
        env:
          GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
 
      - name: Build Subchart Deps
        run: |
-          cd helm/librechat-rag-api
-          helm dependency build
+          cd helm/librechat
+          helm dependency build
+          cd ../librechat-rag-api
+          helm dependency build
 
-      - name: Run chart-releaser
-        uses: helm/chart-releaser-action@v1.6.0
+      - name: Get Chart Version
+        id: chart-version
+        run: |
+          CHART_VERSION=$(echo "${{ github.ref_name }}" | cut -d'-' -f2)
+          echo "CHART_VERSION=${CHART_VERSION}" >> "$GITHUB_OUTPUT"
+
+      # Log in to GitHub Container Registry
+      - name: Log in to GitHub Container Registry
+        uses: docker/login-action@v3
        with:
-          charts_dir: helm
-          skip_existing: true
-        env:
-          CR_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      # Run Helm OCI Charts Releaser
+      # This is for the librechat chart
+      - name: Release Helm OCI Charts for librechat
+        uses: appany/helm-oci-chart-releaser@v0.4.2
+        with:
+          name: librechat
+          repository: ${{ github.actor }}/librechat-chart
+          tag: ${{ steps.chart-version.outputs.CHART_VERSION }}
+          path: helm/librechat
+          registry: ghcr.io
+          registry_username: ${{ github.actor }}
+          registry_password: ${{ secrets.GITHUB_TOKEN }}
+
+      # this is for the librechat-rag-api chart
+      - name: Release Helm OCI Charts for librechat-rag-api
+        uses: appany/helm-oci-chart-releaser@v0.4.2
+        with:
+          name: librechat-rag-api
+          repository: ${{ github.actor }}/librechat-chart
+          tag: ${{ steps.chart-version.outputs.CHART_VERSION }}
+          path: helm/librechat-rag-api
+          registry: ghcr.io
+          registry_username: ${{ github.actor }}
+          registry_password: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/i18n-unused-keys.yml (33 changes)

@@ -1,5 +1,10 @@
 name: Detect Unused i18next Strings
 
+# This workflow checks for unused i18n keys in translation files.
+# It has special handling for:
+#   - com_ui_special_var_* keys that are dynamically constructed
+#   - com_agents_category_* keys that are stored in the database and used dynamically
+
 on:
   pull_request:
     paths:

@@ -7,6 +12,7 @@ on:
       - "api/**"
      - "packages/data-provider/src/**"
      - "packages/client/**"
+      - "packages/data-schemas/src/**"
 
 jobs:
   detect-unused-i18n-keys:

@@ -24,7 +30,7 @@ jobs:
 
          # Define paths
          I18N_FILE="client/src/locales/en/translation.json"
-          SOURCE_DIRS=("client/src" "api" "packages/data-provider/src" "packages/client")
+          SOURCE_DIRS=("client/src" "api" "packages/data-provider/src" "packages/client" "packages/data-schemas/src")
 
          # Check if translation file exists
          if [[ ! -f "$I18N_FILE" ]]; then

@@ -52,6 +58,31 @@ jobs:
              fi
            done
 
+            # Also check if the key is directly used somewhere
+            if [[ "$FOUND" == false ]]; then
+              for DIR in "${SOURCE_DIRS[@]}"; do
+                if grep -r --include=\*.{js,jsx,ts,tsx} -q "$KEY" "$DIR"; then
+                  FOUND=true
+                  break
+                fi
+              done
+            fi
+          # Special case for agent category keys that are dynamically used from database
+          elif [[ "$KEY" == com_agents_category_* ]]; then
+            # Check if agent category localization is being used
+            for DIR in "${SOURCE_DIRS[@]}"; do
+              # Check for dynamic category label/description usage
+              if grep -r --include=\*.{js,jsx,ts,tsx} -E "category\.(label|description).*startsWith.*['\"]com_" "$DIR" > /dev/null 2>&1 || \
+                 # Check for the method that defines these keys
+                 grep -r --include=\*.{js,jsx,ts,tsx} "ensureDefaultCategories" "$DIR" > /dev/null 2>&1 || \
+                 # Check for direct usage in agentCategory.ts
+                 grep -r --include=\*.ts -E "label:.*['\"]$KEY['\"]" "$DIR" > /dev/null 2>&1 || \
+                 grep -r --include=\*.ts -E "description:.*['\"]$KEY['\"]" "$DIR" > /dev/null 2>&1; then
+                FOUND=true
+                break
+              fi
+            done
+
+            # Also check if the key is directly used somewhere
+            if [[ "$FOUND" == false ]]; then
+              for DIR in "${SOURCE_DIRS[@]}"; do
.gitignore (4 changes)

@@ -13,6 +13,9 @@ pids
 *.seed
 .git
 
+# CI/CD data
+test-image*
+
 # Directory for instrumented libs generated by jscoverage/JSCover
 lib-cov

@@ -134,3 +137,4 @@ helm/**/.values.yaml
 /.openai/
 /.tabnine/
 /.codeium
+*.local.md
Dockerfile (17 changes)

@@ -1,4 +1,4 @@
-# v0.7.9
+# v0.8.0-rc3
 
 # Base node image
 FROM node:20-alpine AS node

@@ -19,7 +19,12 @@ WORKDIR /app
 
 USER node
 
-COPY --chown=node:node . .
+COPY --chown=node:node package.json package-lock.json ./
+COPY --chown=node:node api/package.json ./api/package.json
+COPY --chown=node:node client/package.json ./client/package.json
+COPY --chown=node:node packages/data-provider/package.json ./packages/data-provider/package.json
+COPY --chown=node:node packages/data-schemas/package.json ./packages/data-schemas/package.json
+COPY --chown=node:node packages/api/package.json ./packages/api/package.json
 
 RUN \
     # Allow mounting of these files, which have no default

@@ -29,7 +34,11 @@ RUN \
     npm config set fetch-retry-maxtimeout 600000 ; \
     npm config set fetch-retries 5 ; \
     npm config set fetch-retry-mintimeout 15000 ; \
-    npm install --no-audit; \
+    npm ci --no-audit
+
+COPY --chown=node:node . .
+
+RUN \
     # React client build
     NODE_OPTIONS="--max-old-space-size=2048" npm run frontend; \
     npm prune --production; \

@@ -47,4 +56,4 @@ CMD ["npm", "run", "backend"]
 # WORKDIR /usr/share/nginx/html
 # COPY --from=node /app/client/dist /usr/share/nginx/html
 # COPY client/nginx.conf /etc/nginx/conf.d/default.conf
-# ENTRYPOINT ["nginx", "-g", "daemon off;"]
+# ENTRYPOINT ["nginx", "-g", "daemon off;"]

Dockerfile.multi

@@ -1,5 +1,5 @@
 # Dockerfile.multi
-# v0.7.9
+# v0.8.0-rc3
 
 # Base for all builds
 FROM node:20-alpine AS base-min
README.md (15 changes)

@@ -65,8 +65,10 @@
 
 - 🔦 **Agents & Tools Integration**:
   - **[LibreChat Agents](https://www.librechat.ai/docs/features/agents)**:
-    - No-Code Custom Assistants: Build specialized, AI-driven helpers without coding
-    - Flexible & Extensible: Use MCP Servers, tools, file search, code execution, and more
+    - No-Code Custom Assistants: Build specialized, AI-driven helpers
+    - Agent Marketplace: Discover and deploy community-built agents
+    - Collaborative Sharing: Share agents with specific users and groups
+    - Flexible & Extensible: Use MCP Servers, tools, file search, code execution, and more
   - Compatible with Custom Endpoints, OpenAI, Azure, Anthropic, AWS Bedrock, Google, Vertex AI, Responses API, and more
   - [Model Context Protocol (MCP) Support](https://modelcontextprotocol.io/clients#librechat) for Tools

@@ -87,15 +89,18 @@
   - Create, Save, & Share Custom Presets
   - Switch between AI Endpoints and Presets mid-chat
   - Edit, Resubmit, and Continue Messages with Conversation branching
+  - Create and share prompts with specific users and groups
   - [Fork Messages & Conversations](https://www.librechat.ai/docs/features/fork) for Advanced Context control
 
 - 💬 **Multimodal & File Interactions**:
   - Upload and analyze images with Claude 3, GPT-4.5, GPT-4o, o1, Llama-Vision, and Gemini 📸
   - Chat with Files using Custom Endpoints, OpenAI, Azure, Anthropic, AWS Bedrock, & Google 🗃️
 
-- 🌎 **Multilingual UI**:
-  - English, 中文, Deutsch, Español, Français, Italiano, Polski, Português Brasileiro
-  - Русский, 日本語, Svenska, 한국어, Tiếng Việt, 繁體中文, العربية, Türkçe, Nederlands, עברית
+- 🌎 **Multilingual UI**:
+  - English, 中文 (简体), 中文 (繁體), العربية, Deutsch, Español, Français, Italiano
+  - Polski, Português (PT), Português (BR), Русский, 日本語, Svenska, 한국어, Tiếng Việt
+  - Türkçe, Nederlands, עברית, Català, Čeština, Dansk, Eesti, فارسی
+  - Suomi, Magyar, Հայերեն, Bahasa Indonesia, ქართული, Latviešu, ไทย, ئۇيغۇرچە
 
 - 🧠 **Reasoning UI**:
   - Dynamic Reasoning UI for Chain-of-Thought/Reasoning AI models like DeepSeek-R1
@@ -37,6 +37,8 @@ class BaseClient {
     this.conversationId;
     /** @type {string} */
     this.responseMessageId;
+    /** @type {string} */
+    this.parentMessageId;
     /** @type {TAttachment[]} */
     this.attachments;
     /** The key for the usage object's input tokens

@@ -185,7 +187,8 @@ class BaseClient {
     this.user = user;
     const saveOptions = this.getSaveOptions();
     this.abortController = opts.abortController ?? new AbortController();
-    const conversationId = overrideConvoId ?? opts.conversationId ?? crypto.randomUUID();
+    const requestConvoId = overrideConvoId ?? opts.conversationId;
+    const conversationId = requestConvoId ?? crypto.randomUUID();
     const parentMessageId = opts.parentMessageId ?? Constants.NO_PARENT;
     const userMessageId =
       overrideUserMessageId ?? opts.overrideParentMessageId ?? crypto.randomUUID();

@@ -210,11 +213,12 @@ class BaseClient {
       ...opts,
       user,
       head,
-      saveOptions,
-      userMessageId,
+      requestConvoId,
       conversationId,
       parentMessageId,
+      userMessageId,
       responseMessageId,
+      saveOptions,
     };
   }

@@ -233,11 +237,12 @@ class BaseClient {
     const {
       user,
       head,
-      saveOptions,
-      userMessageId,
+      requestConvoId,
       conversationId,
       parentMessageId,
+      userMessageId,
       responseMessageId,
+      saveOptions,
     } = await this.setMessageOptions(opts);

     const userMessage = opts.isEdited

@@ -259,7 +264,8 @@ class BaseClient {
     }
 
     if (typeof opts?.onStart === 'function') {
-      opts.onStart(userMessage, responseMessageId);
+      const isNewConvo = !requestConvoId && parentMessageId === Constants.NO_PARENT;
+      opts.onStart(userMessage, responseMessageId, isNewConvo);
     }
 
     return {

@@ -614,15 +620,19 @@ class BaseClient {
       this.currentMessages.push(userMessage);
     }
 
+    /**
+     * When the userMessage is pushed to currentMessages, the parentMessage is the userMessageId.
+     * this only matters when buildMessages is utilizing the parentMessageId, and may vary on implementation
+     */
+    const parentMessageId = isEdited ? head : userMessage.messageId;
+    this.parentMessageId = parentMessageId;
     let {
       prompt: payload,
       tokenCountMap,
       promptTokens,
     } = await this.buildMessages(
       this.currentMessages,
-      // When the userMessage is pushed to currentMessages, the parentMessage is the userMessageId.
-      // this only matters when buildMessages is utilizing the parentMessageId, and may vary on implementation
-      isEdited ? head : userMessage.messageId,
+      parentMessageId,
      this.getBuildMessagesOptions(opts),
      opts,
    );
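For orientation, the `requestConvoId`/`isNewConvo` change means `onStart` now receives a third argument that is true only when the request carried no conversation id and the message has no parent. A minimal sketch of a consumer, under that reading (handler body hypothetical):

```js
// Hypothetical consumer of the updated onStart signature.
const opts = {
  onStart: (userMessage, responseMessageId, isNewConvo) => {
    if (isNewConvo) {
      // Only fires for brand-new conversations (no requestConvoId, no parent).
      console.log(`New conversation: ${userMessage.conversationId}`);
    }
    console.log(`Awaiting response message ${responseMessageId}`);
  },
};
```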
@@ -653,8 +653,10 @@ class OpenAIClient extends BaseClient {
     if (headers && typeof headers === 'object' && !Array.isArray(headers)) {
       configOptions.baseOptions = {
         headers: resolveHeaders({
-          ...headers,
-          ...configOptions?.baseOptions?.headers,
+          headers: {
+            ...headers,
+            ...configOptions?.baseOptions?.headers,
+          },
         }),
       };
     }

@@ -749,7 +751,7 @@ class OpenAIClient extends BaseClient {
       groupMap,
     });
 
-    this.options.headers = resolveHeaders(headers);
+    this.options.headers = resolveHeaders({ headers });
     this.options.reverseProxyUrl = baseURL ?? null;
     this.langchainProxy = extractBaseURL(this.options.reverseProxyUrl);
     this.apiKey = azureOptions.azureOpenAIApiKey;

@@ -1181,7 +1183,7 @@ ${convo}
       modelGroupMap,
       groupMap,
     });
-    opts.defaultHeaders = resolveHeaders(headers);
+    opts.defaultHeaders = resolveHeaders({ headers });
     this.langchainProxy = extractBaseURL(baseURL);
     this.apiKey = azureOptions.azureOpenAIApiKey;

@@ -1222,7 +1224,9 @@ ${convo}
     }
 
     if (this.isOmni === true && modelOptions.max_tokens != null) {
-      modelOptions.max_completion_tokens = modelOptions.max_tokens;
+      const paramName =
+        modelOptions.useResponsesApi === true ? 'max_output_tokens' : 'max_completion_tokens';
+      modelOptions[paramName] = modelOptions.max_tokens;
       delete modelOptions.max_tokens;
     }
     if (this.isOmni === true && modelOptions.temperature != null) {
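To make the omni-model token-parameter remapping concrete, a standalone sketch of the behavior the last hunk implements (it mirrors the diff's logic; this is not the client code itself):

```js
// Chat Completions expects `max_completion_tokens`; the Responses API
// expects `max_output_tokens`. `max_tokens` itself is removed either way.
function remapMaxTokens(modelOptions, isOmni) {
  if (isOmni === true && modelOptions.max_tokens != null) {
    const paramName =
      modelOptions.useResponsesApi === true ? 'max_output_tokens' : 'max_completion_tokens';
    modelOptions[paramName] = modelOptions.max_tokens;
    delete modelOptions.max_tokens;
  }
  return modelOptions;
}

console.log(remapMaxTokens({ max_tokens: 1024, useResponsesApi: true }, true));
// -> { useResponsesApi: true, max_output_tokens: 1024 }
```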
@@ -245,7 +245,7 @@ describe('AnthropicClient', () => {
   });
 
   describe('Claude 4 model headers', () => {
-    it('should add "prompt-caching" beta header for claude-sonnet-4 model', () => {
+    it('should add "prompt-caching" and "context-1m" beta headers for claude-sonnet-4 model', () => {
       const client = new AnthropicClient('test-api-key');
       const modelOptions = {
         model: 'claude-sonnet-4-20250514',

@@ -255,10 +255,30 @@ describe('AnthropicClient', () => {
       expect(anthropicClient._options.defaultHeaders).toBeDefined();
       expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
       expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
-        'prompt-caching-2024-07-31',
+        'prompt-caching-2024-07-31,context-1m-2025-08-07',
       );
     });
 
+    it('should add "prompt-caching" and "context-1m" beta headers for claude-sonnet-4 model formats', () => {
+      const client = new AnthropicClient('test-api-key');
+      const modelVariations = [
+        'claude-sonnet-4-20250514',
+        'claude-sonnet-4-latest',
+        'anthropic/claude-sonnet-4-20250514',
+      ];
+
+      modelVariations.forEach((model) => {
+        const modelOptions = { model };
+        client.setOptions({ modelOptions, promptCache: true });
+        const anthropicClient = client.getClient(modelOptions);
+        expect(anthropicClient._options.defaultHeaders).toBeDefined();
+        expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
+        expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
+          'prompt-caching-2024-07-31,context-1m-2025-08-07',
+        );
+      });
+    });
+
     it('should add "prompt-caching" beta header for claude-opus-4 model', () => {
       const client = new AnthropicClient('test-api-key');
       const modelOptions = {

@@ -273,20 +293,6 @@ describe('AnthropicClient', () => {
       );
     });
 
-    it('should add "prompt-caching" beta header for claude-4-sonnet model', () => {
-      const client = new AnthropicClient('test-api-key');
-      const modelOptions = {
-        model: 'claude-4-sonnet-20250514',
-      };
-      client.setOptions({ modelOptions, promptCache: true });
-      const anthropicClient = client.getClient(modelOptions);
-      expect(anthropicClient._options.defaultHeaders).toBeDefined();
-      expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
-      expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
-        'prompt-caching-2024-07-31',
-      );
-    });
-
     it('should add "prompt-caching" beta header for claude-4-opus model', () => {
       const client = new AnthropicClient('test-api-key');
       const modelOptions = {

@@ -579,6 +579,8 @@ describe('BaseClient', () => {
       expect(onStart).toHaveBeenCalledWith(
         expect.objectContaining({ text: 'Hello, world!' }),
         expect.any(String),
+        /** `isNewConvo` */
+        true,
       );
     });
@@ -3,7 +3,7 @@ const { SerpAPI } = require('@langchain/community/tools/serpapi');
 const { Calculator } = require('@langchain/community/tools/calculator');
 const { mcpToolPattern, loadWebSearchAuth } = require('@librechat/api');
 const { EnvVar, createCodeExecutionTool, createSearchTool } = require('@librechat/agents');
-const { Tools, EToolResources, replaceSpecialVars } = require('librechat-data-provider');
+const { Tools, Constants, EToolResources, replaceSpecialVars } = require('librechat-data-provider');
 const {
   availableTools,
   manifestToolMap,

@@ -24,9 +24,9 @@ const {
 const { primeFiles: primeCodeFiles } = require('~/server/services/Files/Code/process');
 const { createFileSearchTool, primeFiles: primeSearchFiles } = require('./fileSearch');
 const { getUserPluginAuthValue } = require('~/server/services/PluginService');
+const { createMCPTool, createMCPTools } = require('~/server/services/MCP');
 const { loadAuthValues } = require('~/server/services/Tools/credentials');
 const { getCachedTools } = require('~/server/services/Config');
-const { createMCPTool } = require('~/server/services/MCP');
 
 /**
  * Validates the availability and authentication of tools for a user based on environment variables or user-specific plugin authentication values.

@@ -123,6 +123,8 @@ const getAuthFields = (toolKey) => {
  *
  * @param {object} object
  * @param {string} object.user
+ * @param {Record<string, Record<string, string>>} [object.userMCPAuthMap]
+ * @param {AbortSignal} [object.signal]
  * @param {Pick<Agent, 'id' | 'provider' | 'model'>} [object.agent]
  * @param {string} [object.model]
 * @param {EModelEndpoint} [object.endpoint]

@@ -137,7 +139,9 @@ const loadTools = async ({
   user,
   agent,
   model,
+  signal,
   endpoint,
+  userMCPAuthMap,
   tools = [],
   options = {},
   functions = true,

@@ -231,6 +235,7 @@ const loadTools = async ({
   /** @type {Record<string, string>} */
   const toolContextMap = {};
   const cachedTools = (await getCachedTools({ userId: user, includeGlobal: true })) ?? {};
+  const requestedMCPTools = {};
 
   for (const tool of tools) {
     if (tool === Tools.execute_code) {

@@ -299,14 +304,35 @@ Current Date & Time: ${replaceSpecialVars({ text: '{{iso_datetime}}' })}
       };
       continue;
     } else if (tool && cachedTools && mcpToolPattern.test(tool)) {
-      requestedTools[tool] = async () =>
+      const [toolName, serverName] = tool.split(Constants.mcp_delimiter);
+      if (toolName === Constants.mcp_all) {
+        const currentMCPGenerator = async (index) =>
+          createMCPTools({
+            req: options.req,
+            res: options.res,
+            index,
+            serverName,
+            userMCPAuthMap,
+            model: agent?.model ?? model,
+            provider: agent?.provider ?? endpoint,
+            signal,
+          });
+        requestedMCPTools[serverName] = [currentMCPGenerator];
+        continue;
+      }
+      const currentMCPGenerator = async (index) =>
         createMCPTool({
+          index,
           req: options.req,
           res: options.res,
           toolKey: tool,
+          userMCPAuthMap,
           model: agent?.model ?? model,
           provider: agent?.provider ?? endpoint,
+          signal,
        });
+      requestedMCPTools[serverName] = requestedMCPTools[serverName] || [];
+      requestedMCPTools[serverName].push(currentMCPGenerator);
       continue;
     }

@@ -346,6 +372,34 @@ Current Date & Time: ${replaceSpecialVars({ text: '{{iso_datetime}}' })}
   }
 
   const loadedTools = (await Promise.all(toolPromises)).flatMap((plugin) => plugin || []);
+  const mcpToolPromises = [];
+  /** MCP server tools are initialized sequentially by server */
+  let index = -1;
+  for (const [serverName, generators] of Object.entries(requestedMCPTools)) {
+    index++;
+    for (const generator of generators) {
+      try {
+        if (generator && generators.length === 1) {
+          mcpToolPromises.push(
+            generator(index).catch((error) => {
+              logger.error(`Error loading ${serverName} tools:`, error);
+              return null;
+            }),
+          );
+          continue;
+        }
+        const mcpTool = await generator(index);
+        if (Array.isArray(mcpTool)) {
+          loadedTools.push(...mcpTool);
+        } else if (mcpTool) {
+          loadedTools.push(mcpTool);
+        }
+      } catch (error) {
+        logger.error(`Error loading MCP tool for server ${serverName}:`, error);
+      }
+    }
+  }
+  loadedTools.push(...(await Promise.all(mcpToolPromises)).flatMap((plugin) => plugin || []));
   return { loadedTools, toolContextMap };
 };
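The grouping above relies on the MCP tool-key convention: a key encodes the tool name and its server name joined by `Constants.mcp_delimiter`, with `Constants.mcp_all` as a whole-server placeholder. A small sketch (tool and server names hypothetical):

```js
const { Constants } = require('librechat-data-provider');

// A concrete tool key splits into [toolName, serverName]:
const toolKey = `get_weather${Constants.mcp_delimiter}weather-server`;
const [toolName, serverName] = toolKey.split(Constants.mcp_delimiter);

// A whole server's tools are requested with the mcp_all placeholder,
// which routes to createMCPTools instead of createMCPTool:
const allKey = `${Constants.mcp_all}${Constants.mcp_delimiter}weather-server`;
console.log(toolName, serverName, allKey);
```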
api/cache/cacheConfig.js (3 changes)

@@ -52,7 +52,8 @@ const cacheConfig = {
   REDIS_CONNECT_TIMEOUT: math(process.env.REDIS_CONNECT_TIMEOUT, 10000),
   /** Queue commands when disconnected */
   REDIS_ENABLE_OFFLINE_QUEUE: isEnabled(process.env.REDIS_ENABLE_OFFLINE_QUEUE ?? 'true'),
-
+  /** Enable redis cluster without the need of multiple URIs */
+  USE_REDIS_CLUSTER: isEnabled(process.env.USE_REDIS_CLUSTER ?? 'false'),
   CI: isEnabled(process.env.CI),
   DEBUG_MEMORY_CACHE: isEnabled(process.env.DEBUG_MEMORY_CACHE),
api/cache/cacheConfig.spec.js (33 changes)

@@ -14,6 +14,7 @@ describe('cacheConfig', () => {
     delete process.env.REDIS_KEY_PREFIX_VAR;
     delete process.env.REDIS_KEY_PREFIX;
     delete process.env.USE_REDIS;
+    delete process.env.USE_REDIS_CLUSTER;
     delete process.env.REDIS_PING_INTERVAL;
     delete process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES;

@@ -101,6 +102,38 @@ describe('cacheConfig', () => {
     });
   });
 
+  describe('USE_REDIS_CLUSTER configuration', () => {
+    test('should default to false when USE_REDIS_CLUSTER is not set', () => {
+      const { cacheConfig } = require('./cacheConfig');
+      expect(cacheConfig.USE_REDIS_CLUSTER).toBe(false);
+    });
+
+    test('should be false when USE_REDIS_CLUSTER is set to false', () => {
+      process.env.USE_REDIS_CLUSTER = 'false';
+
+      const { cacheConfig } = require('./cacheConfig');
+      expect(cacheConfig.USE_REDIS_CLUSTER).toBe(false);
+    });
+
+    test('should be true when USE_REDIS_CLUSTER is set to true', () => {
+      process.env.USE_REDIS_CLUSTER = 'true';
+
+      const { cacheConfig } = require('./cacheConfig');
+      expect(cacheConfig.USE_REDIS_CLUSTER).toBe(true);
+    });
+
+    test('should work with USE_REDIS enabled and REDIS_URI set', () => {
+      process.env.USE_REDIS_CLUSTER = 'true';
+      process.env.USE_REDIS = 'true';
+      process.env.REDIS_URI = 'redis://localhost:6379';
+
+      const { cacheConfig } = require('./cacheConfig');
+      expect(cacheConfig.USE_REDIS_CLUSTER).toBe(true);
+      expect(cacheConfig.USE_REDIS).toBe(true);
+      expect(cacheConfig.REDIS_URI).toBe('redis://localhost:6379');
+    });
+  });
+
   describe('REDIS_CA file reading', () => {
     test('should be null when REDIS_CA is not set', () => {
       const { cacheConfig } = require('./cacheConfig');
api/cache/getLogStores.js (1 change)

@@ -31,7 +31,6 @@ const namespaces = {
   [CacheKeys.SAML_SESSION]: sessionCache(CacheKeys.SAML_SESSION),
 
   [CacheKeys.ROLES]: standardCache(CacheKeys.ROLES),
-  [CacheKeys.MCP_TOOLS]: standardCache(CacheKeys.MCP_TOOLS),
   [CacheKeys.CONFIG_STORE]: standardCache(CacheKeys.CONFIG_STORE),
   [CacheKeys.STATIC_CONFIG]: standardCache(CacheKeys.STATIC_CONFIG),
   [CacheKeys.PENDING_REQ]: standardCache(CacheKeys.PENDING_REQ),
api/cache/redisClients.js (45 changes)

@@ -38,7 +38,7 @@ if (cacheConfig.USE_REDIS) {
       const targetError = 'READONLY';
       if (err.message.includes(targetError)) {
         logger.warn('ioredis reconnecting due to READONLY error');
-        return true;
+        return 2; // Return retry delay instead of boolean
       }
       return false;
     },

@@ -48,26 +48,29 @@ if (cacheConfig.USE_REDIS) {
   };
 
   ioredisClient =
-    urls.length === 1
+    urls.length === 1 && !cacheConfig.USE_REDIS_CLUSTER
       ? new IoRedis(cacheConfig.REDIS_URI, redisOptions)
-      : new IoRedis.Cluster(cacheConfig.REDIS_URI, {
-          redisOptions,
-          clusterRetryStrategy: (times) => {
-            if (
-              cacheConfig.REDIS_RETRY_MAX_ATTEMPTS > 0 &&
-              times > cacheConfig.REDIS_RETRY_MAX_ATTEMPTS
-            ) {
-              logger.error(
-                `ioredis cluster giving up after ${cacheConfig.REDIS_RETRY_MAX_ATTEMPTS} reconnection attempts`,
-              );
-              return null;
-            }
-            const delay = Math.min(times * 100, cacheConfig.REDIS_RETRY_MAX_DELAY);
-            logger.info(`ioredis cluster reconnecting... attempt ${times}, delay ${delay}ms`);
-            return delay;
+      : new IoRedis.Cluster(
+          urls.map((url) => ({ host: url.hostname, port: parseInt(url.port, 10) || 6379 })),
+          {
+            redisOptions,
+            clusterRetryStrategy: (times) => {
+              if (
+                cacheConfig.REDIS_RETRY_MAX_ATTEMPTS > 0 &&
+                times > cacheConfig.REDIS_RETRY_MAX_ATTEMPTS
+              ) {
+                logger.error(
+                  `ioredis cluster giving up after ${cacheConfig.REDIS_RETRY_MAX_ATTEMPTS} reconnection attempts`,
+                );
+                return null;
+              }
+              const delay = Math.min(times * 100, cacheConfig.REDIS_RETRY_MAX_DELAY);
+              logger.info(`ioredis cluster reconnecting... attempt ${times}, delay ${delay}ms`);
+              return delay;
+            },
+            enableOfflineQueue: cacheConfig.REDIS_ENABLE_OFFLINE_QUEUE,
           },
-          enableOfflineQueue: cacheConfig.REDIS_ENABLE_OFFLINE_QUEUE,
-        });
+        );
 
   ioredisClient.on('error', (err) => {
     logger.error('ioredis client error:', err);

@@ -145,10 +148,10 @@ if (cacheConfig.USE_REDIS) {
   };
 
   keyvRedisClient =
-    urls.length === 1
+    urls.length === 1 && !cacheConfig.USE_REDIS_CLUSTER
       ? createClient({ url: cacheConfig.REDIS_URI, ...redisOptions })
       : createCluster({
-          rootNodes: cacheConfig.REDIS_URI.split(',').map((url) => ({ url })),
+          rootNodes: urls.map((url) => ({ url: url.href })),
           defaults: redisOptions,
         });
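For reference, the cluster branch above turns a comma-separated `REDIS_URI` into ioredis node descriptors; a quick standalone sketch (addresses illustrative):

```js
// How a comma-separated REDIS_URI becomes ioredis cluster nodes.
const REDIS_URI = 'redis://10.0.0.1:6379,redis://10.0.0.2:6380';
const urls = REDIS_URI.split(',').map((uri) => new URL(uri));

const nodes = urls.map((url) => ({
  host: url.hostname,
  port: parseInt(url.port, 10) || 6379, // fall back to the default port
}));

console.log(nodes);
// [ { host: '10.0.0.1', port: 6379 }, { host: '10.0.0.2', port: 6380 } ]
```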
@@ -1,27 +1,13 @@
-const { MCPManager, FlowStateManager } = require('@librechat/api');
 const { EventSource } = require('eventsource');
 const { Time } = require('librechat-data-provider');
+const { MCPManager, FlowStateManager } = require('@librechat/api');
 const logger = require('./winston');
 
 global.EventSource = EventSource;
 
-/** @type {MCPManager} */
-let mcpManager = null;
 let flowManager = null;
 
-/**
- * @param {string} [userId] - Optional user ID, to avoid disconnecting the current user.
- * @returns {MCPManager}
- */
-function getMCPManager(userId) {
-  if (!mcpManager) {
-    mcpManager = MCPManager.getInstance();
-  } else {
-    mcpManager.checkIdleConnections(userId);
-  }
-  return mcpManager;
-}
-
 /**
  * @param {Keyv} flowsCache
 * @returns {FlowStateManager}

@@ -37,6 +23,7 @@ function getFlowStateManager(flowsCache) {
 
 module.exports = {
   logger,
-  getMCPManager,
+  createMCPManager: MCPManager.createInstance,
+  getMCPManager: MCPManager.getInstance,
   getFlowStateManager,
 };
@@ -2,7 +2,7 @@ const mongoose = require('mongoose');
 const crypto = require('node:crypto');
 const { logger } = require('@librechat/data-schemas');
 const { ResourceType, SystemRoles, Tools, actionDelimiter } = require('librechat-data-provider');
-const { GLOBAL_PROJECT_NAME, EPHEMERAL_AGENT_ID, mcp_delimiter } =
+const { GLOBAL_PROJECT_NAME, EPHEMERAL_AGENT_ID, mcp_all, mcp_delimiter } =
   require('librechat-data-provider').Constants;
 const {
   removeAgentFromAllProjects,

@@ -78,6 +78,7 @@ const loadEphemeralAgent = async ({ req, agent_id, endpoint, model_parameters: _
     tools.push(Tools.web_search);
   }
 
+  const addedServers = new Set();
   if (mcpServers.size > 0) {
     for (const toolName of Object.keys(availableTools)) {
       if (!toolName.includes(mcp_delimiter)) {

@@ -85,9 +86,17 @@ const loadEphemeralAgent = async ({ req, agent_id, endpoint, model_parameters: _
       }
       const mcpServer = toolName.split(mcp_delimiter)?.[1];
       if (mcpServer && mcpServers.has(mcpServer)) {
+        addedServers.add(mcpServer);
         tools.push(toolName);
       }
     }
+
+    for (const mcpServer of mcpServers) {
+      if (addedServers.has(mcpServer)) {
+        continue;
+      }
+      tools.push(`${mcp_all}${mcp_delimiter}${mcpServer}`);
+    }
   }
 
   const instructions = req.body.promptPrefix;

@@ -364,17 +373,10 @@ const updateAgent = async (searchParameter, updateData, options = {}) => {
   if (shouldCreateVersion) {
     const duplicateVersion = isDuplicateVersion(updateData, versionData, versions, actionsHash);
     if (duplicateVersion && !forceVersion) {
-      const error = new Error(
-        'Duplicate version: This would create a version identical to an existing one',
-      );
-      error.statusCode = 409;
-      error.details = {
-        duplicateVersion,
-        versionIndex: versions.findIndex(
-          (v) => JSON.stringify(duplicateVersion) === JSON.stringify(v),
-        ),
-      };
-      throw error;
+      // No changes detected, return the current agent without creating a new version
+      const agentObj = currentAgent.toObject();
+      agentObj.version = versions.length;
+      return agentObj;
     }
   }

@@ -540,11 +542,7 @@ const getListAgentsByAccess = async ({
   const normalizedLimit = isPaginated ? Math.min(Math.max(1, parseInt(limit) || 20), 100) : null;
 
   // Build base query combining ACL accessible agents with other filters
-  const baseQuery = { ...otherParams };
-
-  if (accessibleIds.length > 0) {
-    baseQuery._id = { $in: accessibleIds };
-  }
+  const baseQuery = { ...otherParams, _id: { $in: accessibleIds } };
 
   // Add cursor condition
   if (after) {
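In effect, loadEphemeralAgent now pushes concrete tool keys for servers whose tools are already cached, and a single `mcp_all` placeholder for each remaining server. A compressed sketch of that selection (server and tool names hypothetical):

```js
const { mcp_all, mcp_delimiter } = require('librechat-data-provider').Constants;

// Hypothetical: 'github' tools are cached, 'weather' has none cached yet.
const availableTools = {
  [`search_issues${mcp_delimiter}github`]: {},
  [`create_pr${mcp_delimiter}github`]: {},
};
const mcpServers = new Set(['github', 'weather']);

const tools = [];
const addedServers = new Set();
for (const toolName of Object.keys(availableTools)) {
  const server = toolName.split(mcp_delimiter)?.[1];
  if (server && mcpServers.has(server)) {
    addedServers.add(server);
    tools.push(toolName);
  }
}
for (const server of mcpServers) {
  if (!addedServers.has(server)) {
    tools.push(`${mcp_all}${mcp_delimiter}${server}`);
  }
}
console.log(tools); // two concrete 'github' keys plus one placeholder for 'weather'
```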
@@ -22,6 +22,7 @@ const {
   updateAgent,
   deleteAgent,
   getListAgents,
+  getListAgentsByAccess,
   revertAgentVersion,
   updateAgentProjects,
   addAgentResourceFile,
@@ -941,45 +942,31 @@ describe('models/Agent', () => {
     expect(emptyParamsAgent.model_parameters).toEqual({});
   });
 
-  test('should detect duplicate versions and reject updates', async () => {
-    const originalConsoleError = console.error;
-    console.error = jest.fn();
+  test('should not create new version for duplicate updates', async () => {
+    const authorId = new mongoose.Types.ObjectId();
+    const testCases = generateVersionTestCases();
 
-    try {
-      const authorId = new mongoose.Types.ObjectId();
-      const testCases = generateVersionTestCases();
+    for (const testCase of testCases) {
+      const testAgentId = `agent_${uuidv4()}`;
 
-      for (const testCase of testCases) {
-        const testAgentId = `agent_${uuidv4()}`;
+      await createAgent({
+        id: testAgentId,
+        provider: 'test',
+        model: 'test-model',
+        author: authorId,
+        ...testCase.initial,
+      });
 
-        await createAgent({
-          id: testAgentId,
-          provider: 'test',
-          model: 'test-model',
-          author: authorId,
-          ...testCase.initial,
-        });
+      const updatedAgent = await updateAgent({ id: testAgentId }, testCase.update);
+      expect(updatedAgent.versions).toHaveLength(2); // No new version created
 
-        await updateAgent({ id: testAgentId }, testCase.update);
+      // Update with duplicate data should succeed but not create a new version
+      const duplicateUpdate = await updateAgent({ id: testAgentId }, testCase.duplicate);
 
-        let error;
-        try {
-          await updateAgent({ id: testAgentId }, testCase.duplicate);
-        } catch (e) {
-          error = e;
-        }
+      expect(duplicateUpdate.versions).toHaveLength(2); // No new version created
 
-        expect(error).toBeDefined();
-        expect(error.message).toContain('Duplicate version');
-        expect(error.statusCode).toBe(409);
-        expect(error.details).toBeDefined();
-        expect(error.details.duplicateVersion).toBeDefined();
-
-        const agent = await getAgent({ id: testAgentId });
-        expect(agent.versions).toHaveLength(2);
-      }
-    } finally {
-      console.error = originalConsoleError;
+      const agent = await getAgent({ id: testAgentId });
+      expect(agent.versions).toHaveLength(2);
     }
   });
@@ -1155,20 +1142,13 @@ describe('models/Agent', () => {
     expect(secondUpdate.versions).toHaveLength(3);
 
     // Update without forceVersion and no changes should not create a version
-    let error;
-    try {
-      await updateAgent(
-        { id: agentId },
-        { tools: ['listEvents_action_test.com', 'createEvent_action_test.com'] },
-        { updatingUserId: authorId.toString(), forceVersion: false },
-      );
-    } catch (e) {
-      error = e;
-    }
+    const duplicateUpdate = await updateAgent(
+      { id: agentId },
+      { tools: ['listEvents_action_test.com', 'createEvent_action_test.com'] },
+      { updatingUserId: authorId.toString(), forceVersion: false },
+    );
 
-    expect(error).toBeDefined();
-    expect(error.message).toContain('Duplicate version');
-    expect(error.statusCode).toBe(409);
+    expect(duplicateUpdate.versions).toHaveLength(3); // No new version created
   });
 
   test('should handle isDuplicateVersion with arrays containing null/undefined values', async () => {
@@ -1366,18 +1346,21 @@ describe('models/Agent', () => {
     expect(secondUpdate.versions).toHaveLength(3);
     expect(secondUpdate.support_contact.email).toBe('updated@support.com');
 
-    // Try to update with same support_contact - should be detected as duplicate
-    await expect(
-      updateAgent(
-        { id: agentId },
-        {
-          support_contact: {
-            name: 'Updated Support',
-            email: 'updated@support.com',
-          },
-        },
-      ),
-    ).rejects.toThrow('Duplicate version');
+    // Try to update with same support_contact - should be detected as duplicate but return successfully
+    const duplicateUpdate = await updateAgent(
+      { id: agentId },
+      {
+        support_contact: {
+          name: 'Updated Support',
+          email: 'updated@support.com',
+        },
+      },
+    );
+
+    // Should not create a new version
+    expect(duplicateUpdate.versions).toHaveLength(3);
+    expect(duplicateUpdate.version).toBe(3);
+    expect(duplicateUpdate.support_contact.email).toBe('updated@support.com');
   });
 
   test('should handle support_contact from empty to populated', async () => {
@@ -1561,18 +1544,22 @@ describe('models/Agent', () => {
     expect(updated.support_contact.name).toBe('New Name');
     expect(updated.support_contact.email).toBe('');
 
-    // Verify isDuplicateVersion works with partial changes
-    await expect(
-      updateAgent(
-        { id: agentId },
-        {
-          support_contact: {
-            name: 'New Name',
-            email: '',
-          },
-        },
-      ),
-    ).rejects.toThrow('Duplicate version');
+    // Verify isDuplicateVersion works with partial changes - should return successfully without creating new version
+    const duplicateUpdate = await updateAgent(
+      { id: agentId },
+      {
+        support_contact: {
+          name: 'New Name',
+          email: '',
+        },
+      },
+    );
+
+    // Should not create a new version since content is the same
+    expect(duplicateUpdate.versions).toHaveLength(2);
+    expect(duplicateUpdate.version).toBe(2);
+    expect(duplicateUpdate.support_contact.name).toBe('New Name');
+    expect(duplicateUpdate.support_contact.email).toBe('');
   });
 
   // Edge Cases
@@ -2792,11 +2779,18 @@ describe('models/Agent', () => {
       agent_ids: ['agent1', 'agent2'],
     });
 
-    await updateAgent({ id: agentId }, { agent_ids: ['agent1', 'agent2', 'agent3'] });
+    const updatedAgent = await updateAgent(
+      { id: agentId },
+      { agent_ids: ['agent1', 'agent2', 'agent3'] },
+    );
+    expect(updatedAgent.versions).toHaveLength(2);
 
-    await expect(
-      updateAgent({ id: agentId }, { agent_ids: ['agent1', 'agent2', 'agent3'] }),
-    ).rejects.toThrow('Duplicate version');
+    // Update with same agent_ids should succeed but not create a new version
+    const duplicateUpdate = await updateAgent(
+      { id: agentId },
+      { agent_ids: ['agent1', 'agent2', 'agent3'] },
+    );
+    expect(duplicateUpdate.versions).toHaveLength(2); // No new version created
   });
 
   test('should handle agent_ids field alongside other fields', async () => {
@@ -2935,9 +2929,10 @@ describe('models/Agent', () => {
     expect(updated.versions).toHaveLength(2);
     expect(updated.agent_ids).toEqual([]);
 
-    await expect(updateAgent({ id: agentId }, { agent_ids: [] })).rejects.toThrow(
-      'Duplicate version',
-    );
+    // Update with same empty agent_ids should succeed but not create a new version
+    const duplicateUpdate = await updateAgent({ id: agentId }, { agent_ids: [] });
+    expect(duplicateUpdate.versions).toHaveLength(2); // No new version created
+    expect(duplicateUpdate.agent_ids).toEqual([]);
   });
 
   test('should handle agent without agent_ids field', async () => {
@@ -3047,6 +3042,212 @@ describe('Support Contact Field', () => {
     // Verify support_contact is undefined when not provided
     expect(agent.support_contact).toBeUndefined();
   });
+
+  describe('getListAgentsByAccess - Security Tests', () => {
+    let userA, userB;
+    let agentA1, agentA2, agentA3;
+
+    beforeEach(async () => {
+      Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
+      await Agent.deleteMany({});
+      await AclEntry.deleteMany({});
+
+      // Create two users
+      userA = new mongoose.Types.ObjectId();
+      userB = new mongoose.Types.ObjectId();
+
+      // Create agents for user A
+      agentA1 = await createAgent({
+        id: `agent_${uuidv4().slice(0, 12)}`,
+        name: 'Agent A1',
+        description: 'User A agent 1',
+        provider: 'openai',
+        model: 'gpt-4',
+        author: userA,
+      });
+
+      agentA2 = await createAgent({
+        id: `agent_${uuidv4().slice(0, 12)}`,
+        name: 'Agent A2',
+        description: 'User A agent 2',
+        provider: 'openai',
+        model: 'gpt-4',
+        author: userA,
+      });
+
+      agentA3 = await createAgent({
+        id: `agent_${uuidv4().slice(0, 12)}`,
+        name: 'Agent A3',
+        description: 'User A agent 3',
+        provider: 'openai',
+        model: 'gpt-4',
+        author: userA,
+      });
+    });
+
+    test('should return empty list when user has no accessible agents (empty accessibleIds)', async () => {
+      // User B has no agents and no shared agents
+      const result = await getListAgentsByAccess({
+        accessibleIds: [],
+        otherParams: {},
+      });
+
+      expect(result.data).toHaveLength(0);
+      expect(result.has_more).toBe(false);
+      expect(result.first_id).toBeNull();
+      expect(result.last_id).toBeNull();
+    });
+
+    test('should not return other users agents when accessibleIds is empty', async () => {
+      // User B trying to list agents with empty accessibleIds should not see User A's agents
+      const result = await getListAgentsByAccess({
+        accessibleIds: [],
+        otherParams: { author: userB },
+      });
+
+      expect(result.data).toHaveLength(0);
+      expect(result.has_more).toBe(false);
+    });
+
+    test('should only return agents in accessibleIds list', async () => {
+      // Give User B access to only one of User A's agents
+      const accessibleIds = [agentA1._id];
+
+      const result = await getListAgentsByAccess({
+        accessibleIds,
+        otherParams: {},
+      });
+
+      expect(result.data).toHaveLength(1);
+      expect(result.data[0].id).toBe(agentA1.id);
+      expect(result.data[0].name).toBe('Agent A1');
+    });
+
+    test('should return multiple accessible agents when provided', async () => {
+      // Give User B access to two of User A's agents
+      const accessibleIds = [agentA1._id, agentA3._id];
+
+      const result = await getListAgentsByAccess({
+        accessibleIds,
+        otherParams: {},
+      });
+
+      expect(result.data).toHaveLength(2);
+      const returnedIds = result.data.map((agent) => agent.id);
+      expect(returnedIds).toContain(agentA1.id);
+      expect(returnedIds).toContain(agentA3.id);
+      expect(returnedIds).not.toContain(agentA2.id);
+    });
+
+    test('should respect other query parameters while enforcing accessibleIds', async () => {
+      // Give access to all agents but filter by name
+      const accessibleIds = [agentA1._id, agentA2._id, agentA3._id];
+
+      const result = await getListAgentsByAccess({
+        accessibleIds,
+        otherParams: { name: 'Agent A2' },
+      });
+
+      expect(result.data).toHaveLength(1);
+      expect(result.data[0].id).toBe(agentA2.id);
+    });
+
+    test('should handle pagination correctly with accessibleIds filter', async () => {
+      // Create more agents
+      const moreAgents = [];
+      for (let i = 4; i <= 10; i++) {
+        const agent = await createAgent({
+          id: `agent_${uuidv4().slice(0, 12)}`,
+          name: `Agent A${i}`,
+          description: `User A agent ${i}`,
+          provider: 'openai',
+          model: 'gpt-4',
+          author: userA,
+        });
+        moreAgents.push(agent);
+      }
+
+      // Give access to all agents
+      const allAgentIds = [agentA1, agentA2, agentA3, ...moreAgents].map((a) => a._id);
+
+      // First page
+      const page1 = await getListAgentsByAccess({
+        accessibleIds: allAgentIds,
+        otherParams: {},
+        limit: 5,
+      });
+
+      expect(page1.data).toHaveLength(5);
+      expect(page1.has_more).toBe(true);
+      expect(page1.after).toBeTruthy();
+
+      // Second page
+      const page2 = await getListAgentsByAccess({
+        accessibleIds: allAgentIds,
+        otherParams: {},
+        limit: 5,
+        after: page1.after,
+      });
+
+      expect(page2.data).toHaveLength(5);
+      expect(page2.has_more).toBe(false);
+
+      // Verify no overlap between pages
+      const page1Ids = page1.data.map((a) => a.id);
+      const page2Ids = page2.data.map((a) => a.id);
+      const intersection = page1Ids.filter((id) => page2Ids.includes(id));
+      expect(intersection).toHaveLength(0);
+    });
+
+    test('should return empty list when accessibleIds contains non-existent IDs', async () => {
+      // Try with non-existent agent IDs
+      const fakeIds = [new mongoose.Types.ObjectId(), new mongoose.Types.ObjectId()];
+
+      const result = await getListAgentsByAccess({
+        accessibleIds: fakeIds,
+        otherParams: {},
+      });
+
+      expect(result.data).toHaveLength(0);
+      expect(result.has_more).toBe(false);
+    });
+
+    test('should handle undefined accessibleIds as empty array', async () => {
+      // When accessibleIds is undefined, it should be treated as empty array
+      const result = await getListAgentsByAccess({
+        accessibleIds: undefined,
+        otherParams: {},
+      });
+
+      expect(result.data).toHaveLength(0);
+      expect(result.has_more).toBe(false);
+    });
+
+    test('should combine accessibleIds with author filter correctly', async () => {
+      // Create an agent for User B
+      const agentB1 = await createAgent({
+        id: `agent_${uuidv4().slice(0, 12)}`,
+        name: 'Agent B1',
+        description: 'User B agent 1',
+        provider: 'openai',
+        model: 'gpt-4',
+        author: userB,
+      });
+
+      // Give User B access to one of User A's agents
+      const accessibleIds = [agentA1._id, agentB1._id];
+
+      // Filter by author should further restrict the results
+      const result = await getListAgentsByAccess({
+        accessibleIds,
+        otherParams: { author: userB },
+      });
+
+      expect(result.data).toHaveLength(1);
+      expect(result.data[0].id).toBe(agentB1.id);
+      expect(result.data[0].author).toBe(userB.toString());
+    });
+  });
 });
 
 function createBasicAgent(overrides = {}) {
@@ -247,10 +247,130 @@ const deletePromptGroup = async ({ _id, author, role }) => {
   return { message: 'Prompt group deleted successfully' };
 };
 
+/**
+ * Get prompt groups by accessible IDs with optional cursor-based pagination.
+ * @param {Object} params - The parameters for getting accessible prompt groups.
+ * @param {Array} [params.accessibleIds] - Array of prompt group ObjectIds the user has ACL access to.
+ * @param {Object} [params.otherParams] - Additional query parameters (including author filter).
+ * @param {number} [params.limit] - Number of prompt groups to return (max 100). If not provided, returns all prompt groups.
+ * @param {string} [params.after] - Cursor for pagination - get prompt groups after this cursor. // base64 encoded JSON string with updatedAt and _id.
+ * @returns {Promise<Object>} A promise that resolves to an object containing the prompt groups data and pagination info.
+ */
+async function getListPromptGroupsByAccess({
+  accessibleIds = [],
+  otherParams = {},
+  limit = null,
+  after = null,
+}) {
+  const isPaginated = limit !== null && limit !== undefined;
+  const normalizedLimit = isPaginated ? Math.min(Math.max(1, parseInt(limit) || 20), 100) : null;
+
+  // Build base query combining ACL accessible prompt groups with other filters
+  const baseQuery = { ...otherParams, _id: { $in: accessibleIds } };
+
+  // Add cursor condition
+  if (after) {
+    try {
+      const cursor = JSON.parse(Buffer.from(after, 'base64').toString('utf8'));
+      const { updatedAt, _id } = cursor;
+
+      const cursorCondition = {
+        $or: [
+          { updatedAt: { $lt: new Date(updatedAt) } },
+          { updatedAt: new Date(updatedAt), _id: { $gt: new ObjectId(_id) } },
+        ],
+      };
+
+      // Merge cursor condition with base query
+      if (Object.keys(baseQuery).length > 0) {
+        baseQuery.$and = [{ ...baseQuery }, cursorCondition];
+        // Remove the original conditions from baseQuery to avoid duplication
+        Object.keys(baseQuery).forEach((key) => {
+          if (key !== '$and') delete baseQuery[key];
+        });
+      } else {
+        Object.assign(baseQuery, cursorCondition);
+      }
+    } catch (error) {
+      logger.warn('Invalid cursor:', error.message);
+    }
+  }
+
+  // Build aggregation pipeline
+  const pipeline = [{ $match: baseQuery }, { $sort: { updatedAt: -1, _id: 1 } }];
+
+  // Only apply limit if pagination is requested
+  if (isPaginated) {
+    pipeline.push({ $limit: normalizedLimit + 1 });
+  }
+
+  // Add lookup for production prompt
+  pipeline.push(
+    {
+      $lookup: {
+        from: 'prompts',
+        localField: 'productionId',
+        foreignField: '_id',
+        as: 'productionPrompt',
+      },
+    },
+    { $unwind: { path: '$productionPrompt', preserveNullAndEmptyArrays: true } },
+    {
+      $project: {
+        name: 1,
+        numberOfGenerations: 1,
+        oneliner: 1,
+        category: 1,
+        projectIds: 1,
+        productionId: 1,
+        author: 1,
+        authorName: 1,
+        createdAt: 1,
+        updatedAt: 1,
+        'productionPrompt.prompt': 1,
+      },
+    },
+  );
+
+  const promptGroups = await PromptGroup.aggregate(pipeline).exec();
+
+  const hasMore = isPaginated ? promptGroups.length > normalizedLimit : false;
+  const data = (isPaginated ? promptGroups.slice(0, normalizedLimit) : promptGroups).map(
+    (group) => {
+      if (group.author) {
+        group.author = group.author.toString();
+      }
+      return group;
+    },
+  );
+
+  // Generate next cursor only if paginated
+  let nextCursor = null;
+  if (isPaginated && hasMore && data.length > 0) {
+    const lastGroup = promptGroups[normalizedLimit - 1];
+    nextCursor = Buffer.from(
+      JSON.stringify({
+        updatedAt: lastGroup.updatedAt.toISOString(),
+        _id: lastGroup._id.toString(),
+      }),
+    ).toString('base64');
+  }
+
+  return {
+    object: 'list',
+    data,
+    first_id: data.length > 0 ? data[0]._id.toString() : null,
+    last_id: data.length > 0 ? data[data.length - 1]._id.toString() : null,
+    has_more: hasMore,
+    after: nextCursor,
+  };
+}
+
 module.exports = {
   getPromptGroups,
   deletePromptGroup,
   getAllPromptGroups,
+  getListPromptGroupsByAccess,
   /**
    * Create a prompt and its respective group
    * @param {TCreatePromptRecord} saveData
@@ -16,7 +16,7 @@ const { Role } = require('~/db/models');
|
||||
*
|
||||
* @param {string} roleName - The name of the role to find or create.
|
||||
* @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
|
||||
* @returns {Promise<Object>} A plain object representing the role document.
|
||||
* @returns {Promise<IRole>} Role document.
|
||||
*/
|
||||
const getRoleByName = async function (roleName, fieldsToSelect = null) {
|
||||
const cache = getLogStores(CacheKeys.ROLES);
|
||||
@@ -72,8 +72,9 @@ const updateRoleByName = async function (roleName, updates) {
|
||||
* Updates access permissions for a specific role and multiple permission types.
|
||||
* @param {string} roleName - The role to update.
|
||||
* @param {Object.<PermissionTypes, Object.<Permissions, boolean>>} permissionsUpdate - Permissions to update and their values.
|
||||
* @param {IRole} [roleData] - Optional role data to use instead of fetching from the database.
|
||||
*/
|
||||
async function updateAccessPermissions(roleName, permissionsUpdate) {
|
||||
async function updateAccessPermissions(roleName, permissionsUpdate, roleData) {
|
||||
// Filter and clean the permission updates based on our schema definition.
|
||||
const updates = {};
|
||||
for (const [permissionType, permissions] of Object.entries(permissionsUpdate)) {
|
||||
@@ -86,7 +87,7 @@ async function updateAccessPermissions(roleName, permissionsUpdate) {
|
||||
}
|
||||
|
||||
try {
|
||||
const role = await getRoleByName(roleName);
|
||||
const role = roleData ?? (await getRoleByName(roleName));
|
||||
if (!role) {
|
||||
return;
|
||||
}
|
||||
@@ -113,7 +114,6 @@ async function updateAccessPermissions(roleName, permissionsUpdate) {
|
||||
}
|
||||
}
|
||||
|
||||
// Process the current updates
|
||||
for (const [permissionType, permissions] of Object.entries(updates)) {
|
||||
const currentTypePermissions = currentPermissions[permissionType] || {};
|
||||
updatedPermissions[permissionType] = { ...currentTypePermissions };
|
||||
|
||||
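The added `roleData` parameter lets callers that already hold the role document skip the cache/database lookup that `getRoleByName` would otherwise repeat. A hedged sketch of the intended call pattern (the permission keys shown are illustrative):

```js
// Sketch: reuse an already-fetched role instead of re-fetching inside the helper.
const role = await getRoleByName(roleName);
await updateAccessPermissions(
  roleName,
  { PROMPTS: { USE: true, CREATE: true } }, // PermissionTypes -> Permissions -> boolean
  role, // optional third argument added in this change
);
```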
@@ -1,4 +1,4 @@
const { matchModelName } = require('../utils');
const { matchModelName } = require('../utils/tokens');
const defaultRate = 6;

/**
@@ -87,6 +87,9 @@ const tokenValues = Object.assign(
    'gpt-4.1': { prompt: 2, completion: 8 },
    'gpt-4.5': { prompt: 75, completion: 150 },
    'gpt-4o-mini': { prompt: 0.15, completion: 0.6 },
    'gpt-5': { prompt: 1.25, completion: 10 },
    'gpt-5-mini': { prompt: 0.25, completion: 2 },
    'gpt-5-nano': { prompt: 0.05, completion: 0.4 },
    'gpt-4o': { prompt: 2.5, completion: 10 },
    'gpt-4o-2024-05-13': { prompt: 5, completion: 15 },
    'gpt-4-1106': { prompt: 10, completion: 30 },
@@ -147,6 +150,9 @@ const tokenValues = Object.assign(
    codestral: { prompt: 0.3, completion: 0.9 },
    'ministral-8b': { prompt: 0.1, completion: 0.1 },
    'ministral-3b': { prompt: 0.04, completion: 0.04 },
    // GPT-OSS models
    'gpt-oss-20b': { prompt: 0.05, completion: 0.2 },
    'gpt-oss-120b': { prompt: 0.15, completion: 0.6 },
  },
  bedrockValues,
);
@@ -214,6 +220,12 @@ const getValueKey = (model, endpoint) => {
    return 'gpt-4.1';
  } else if (modelName.includes('gpt-4o-2024-05-13')) {
    return 'gpt-4o-2024-05-13';
  } else if (modelName.includes('gpt-5-nano')) {
    return 'gpt-5-nano';
  } else if (modelName.includes('gpt-5-mini')) {
    return 'gpt-5-mini';
  } else if (modelName.includes('gpt-5')) {
    return 'gpt-5';
  } else if (modelName.includes('gpt-4o-mini')) {
    return 'gpt-4o-mini';
  } else if (modelName.includes('gpt-4o')) {
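Ordering matters in the new branches: `gpt-5-nano` and `gpt-5-mini` are checked before the bare `gpt-5`, because `modelName.includes('gpt-5')` would also match the variants (the same reason `gpt-4o-mini` precedes `gpt-4o`). The rule in isolation:

```js
// Sketch of the substring-precedence rule getValueKey relies on:
// test the most specific keys first.
const orderedKeys = ['gpt-5-nano', 'gpt-5-mini', 'gpt-5'];
const matchKey = (modelName) => orderedKeys.find((key) => modelName.includes(key));

matchKey('openai/gpt-5-mini-2025-01-30'); // -> 'gpt-5-mini'
matchKey('gpt-5-turbo'); // -> 'gpt-5'
```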
@@ -25,8 +25,14 @@ describe('getValueKey', () => {
    expect(getValueKey('gpt-4-some-other-info')).toBe('8k');
  });

  it('should return undefined for model names that do not match any known patterns', () => {
    expect(getValueKey('gpt-5-some-other-info')).toBeUndefined();
  it('should return "gpt-5" for model name containing "gpt-5"', () => {
    expect(getValueKey('gpt-5-some-other-info')).toBe('gpt-5');
    expect(getValueKey('gpt-5-2025-01-30')).toBe('gpt-5');
    expect(getValueKey('gpt-5-2025-01-30-0130')).toBe('gpt-5');
    expect(getValueKey('openai/gpt-5')).toBe('gpt-5');
    expect(getValueKey('openai/gpt-5-2025-01-30')).toBe('gpt-5');
    expect(getValueKey('gpt-5-turbo')).toBe('gpt-5');
    expect(getValueKey('gpt-5-0130')).toBe('gpt-5');
  });

  it('should return "gpt-3.5-turbo-1106" for model name containing "gpt-3.5-turbo-1106"', () => {
@@ -84,6 +90,29 @@ describe('getValueKey', () => {
    expect(getValueKey('gpt-4.1-nano-0125')).toBe('gpt-4.1-nano');
  });

  it('should return "gpt-5" for model type of "gpt-5"', () => {
    expect(getValueKey('gpt-5-2025-01-30')).toBe('gpt-5');
    expect(getValueKey('gpt-5-2025-01-30-0130')).toBe('gpt-5');
    expect(getValueKey('openai/gpt-5')).toBe('gpt-5');
    expect(getValueKey('openai/gpt-5-2025-01-30')).toBe('gpt-5');
    expect(getValueKey('gpt-5-turbo')).toBe('gpt-5');
    expect(getValueKey('gpt-5-0130')).toBe('gpt-5');
  });

  it('should return "gpt-5-mini" for model type of "gpt-5-mini"', () => {
    expect(getValueKey('gpt-5-mini-2025-01-30')).toBe('gpt-5-mini');
    expect(getValueKey('openai/gpt-5-mini')).toBe('gpt-5-mini');
    expect(getValueKey('gpt-5-mini-0130')).toBe('gpt-5-mini');
    expect(getValueKey('gpt-5-mini-2025-01-30-0130')).toBe('gpt-5-mini');
  });

  it('should return "gpt-5-nano" for model type of "gpt-5-nano"', () => {
    expect(getValueKey('gpt-5-nano-2025-01-30')).toBe('gpt-5-nano');
    expect(getValueKey('openai/gpt-5-nano')).toBe('gpt-5-nano');
    expect(getValueKey('gpt-5-nano-0130')).toBe('gpt-5-nano');
    expect(getValueKey('gpt-5-nano-2025-01-30-0130')).toBe('gpt-5-nano');
  });

  it('should return "gpt-4o" for model type of "gpt-4o"', () => {
    expect(getValueKey('gpt-4o-2024-08-06')).toBe('gpt-4o');
    expect(getValueKey('gpt-4o-2024-08-06-0718')).toBe('gpt-4o');
@@ -207,6 +236,48 @@ describe('getMultiplier', () => {
    );
  });

  it('should return the correct multiplier for gpt-5', () => {
    const valueKey = getValueKey('gpt-5-2025-01-30');
    expect(getMultiplier({ valueKey, tokenType: 'prompt' })).toBe(tokenValues['gpt-5'].prompt);
    expect(getMultiplier({ valueKey, tokenType: 'completion' })).toBe(
      tokenValues['gpt-5'].completion,
    );
    expect(getMultiplier({ model: 'gpt-5-preview', tokenType: 'prompt' })).toBe(
      tokenValues['gpt-5'].prompt,
    );
    expect(getMultiplier({ model: 'openai/gpt-5', tokenType: 'completion' })).toBe(
      tokenValues['gpt-5'].completion,
    );
  });

  it('should return the correct multiplier for gpt-5-mini', () => {
    const valueKey = getValueKey('gpt-5-mini-2025-01-30');
    expect(getMultiplier({ valueKey, tokenType: 'prompt' })).toBe(tokenValues['gpt-5-mini'].prompt);
    expect(getMultiplier({ valueKey, tokenType: 'completion' })).toBe(
      tokenValues['gpt-5-mini'].completion,
    );
    expect(getMultiplier({ model: 'gpt-5-mini-preview', tokenType: 'prompt' })).toBe(
      tokenValues['gpt-5-mini'].prompt,
    );
    expect(getMultiplier({ model: 'openai/gpt-5-mini', tokenType: 'completion' })).toBe(
      tokenValues['gpt-5-mini'].completion,
    );
  });

  it('should return the correct multiplier for gpt-5-nano', () => {
    const valueKey = getValueKey('gpt-5-nano-2025-01-30');
    expect(getMultiplier({ valueKey, tokenType: 'prompt' })).toBe(tokenValues['gpt-5-nano'].prompt);
    expect(getMultiplier({ valueKey, tokenType: 'completion' })).toBe(
      tokenValues['gpt-5-nano'].completion,
    );
    expect(getMultiplier({ model: 'gpt-5-nano-preview', tokenType: 'prompt' })).toBe(
      tokenValues['gpt-5-nano'].prompt,
    );
    expect(getMultiplier({ model: 'openai/gpt-5-nano', tokenType: 'completion' })).toBe(
      tokenValues['gpt-5-nano'].completion,
    );
  });

  it('should return the correct multiplier for gpt-4o', () => {
    const valueKey = getValueKey('gpt-4o-2024-08-06');
    expect(getMultiplier({ valueKey, tokenType: 'prompt' })).toBe(tokenValues['gpt-4o'].prompt);
@@ -307,10 +378,22 @@ describe('getMultiplier', () => {
  });

  it('should return defaultRate if derived valueKey does not match any known patterns', () => {
    expect(getMultiplier({ tokenType: 'prompt', model: 'gpt-5-some-other-info' })).toBe(
    expect(getMultiplier({ tokenType: 'prompt', model: 'gpt-10-some-other-info' })).toBe(
      defaultRate,
    );
  });

  it('should return correct multipliers for GPT-OSS models', () => {
    const models = ['gpt-oss-20b', 'gpt-oss-120b'];
    models.forEach((key) => {
      const expectedPrompt = tokenValues[key].prompt;
      const expectedCompletion = tokenValues[key].completion;
      expect(getMultiplier({ valueKey: key, tokenType: 'prompt' })).toBe(expectedPrompt);
      expect(getMultiplier({ valueKey: key, tokenType: 'completion' })).toBe(expectedCompletion);
      expect(getMultiplier({ model: key, tokenType: 'prompt' })).toBe(expectedPrompt);
      expect(getMultiplier({ model: key, tokenType: 'completion' })).toBe(expectedCompletion);
    });
  });
});

describe('AWS Bedrock Model Tests', () => {
@@ -1,6 +1,6 @@
{
  "name": "@librechat/backend",
  "version": "v0.7.9",
  "version": "v0.8.0-rc3",
  "description": "",
  "scripts": {
    "start": "echo 'please run this from the root directory'",
@@ -49,12 +49,12 @@
    "@langchain/google-vertexai": "^0.2.13",
    "@langchain/openai": "^0.5.18",
    "@langchain/textsplitters": "^0.1.0",
    "@librechat/agents": "^2.4.69",
    "@librechat/agents": "^2.4.76",
    "@librechat/api": "*",
    "@librechat/data-schemas": "*",
    "@microsoft/microsoft-graph-client": "^3.0.7",
    "@modelcontextprotocol/sdk": "^1.17.1",
    "@node-saml/passport-saml": "^5.1.0",
    "@microsoft/microsoft-graph-client": "^3.0.7",
    "@waylaidwanderer/fetch-event-source": "^3.0.1",
    "axios": "^1.8.2",
    "bcryptjs": "^2.4.3",
@@ -1,6 +1,6 @@
const { logger } = require('~/config');
const { logger } = require('@librechat/data-schemas');

// WeakMap to hold temporary data associated with requests
/** WeakMap to hold temporary data associated with requests */
const requestDataMap = new WeakMap();

const FinalizationRegistry = global.FinalizationRegistry || null;
@@ -23,7 +23,7 @@ const clientRegistry = FinalizationRegistry
      } else {
        logger.debug('[FinalizationRegistry] Cleaning up client');
      }
    } catch (e) {
    } catch {
      // Ignore errors
    }
  })
@@ -55,6 +55,9 @@ function disposeClient(client) {
  if (client.responseMessageId) {
    client.responseMessageId = null;
  }
  if (client.parentMessageId) {
    client.parentMessageId = null;
  }
  if (client.message_file_map) {
    client.message_file_map = null;
  }
@@ -334,7 +337,7 @@ function disposeClient(client) {
      }
    }
    client.options = null;
  } catch (e) {
  } catch {
    // Ignore errors during disposal
  }
}
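The registry/WeakMap pairing here is the standard leak-detection pattern: per-request data is keyed weakly so it can be collected along with the request, and a `FinalizationRegistry` (when the runtime provides one) logs when a client is actually reclaimed. A standalone sketch of the pattern, with illustrative names:

```js
// Standalone sketch of the cleanup pattern (names are illustrative).
const registry =
  typeof FinalizationRegistry !== 'undefined'
    ? new FinalizationRegistry((label) => {
        // Runs some time after the tracked object becomes unreachable.
        console.debug(`[FinalizationRegistry] reclaimed: ${label}`);
      })
    : null;

const perRequestData = new WeakMap();

function track(client, label) {
  perRequestData.set(client, { label, startedAt: Date.now() });
  registry?.register(client, label);
}
```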

@@ -84,7 +84,7 @@ const refreshController = async (req, res) => {
  }
  try {
    const payload = jwt.verify(refreshToken, process.env.JWT_REFRESH_SECRET);
    const user = await getUserById(payload.id, '-password -__v -totpSecret');
    const user = await getUserById(payload.id, '-password -__v -totpSecret -backupCodes');
    if (!user) {
      return res.status(401).redirect('/login');
    }

@@ -1,46 +0,0 @@
const { logger } = require('~/config');

// handle duplicates
const handleDuplicateKeyError = (err, res) => {
  logger.error('Duplicate key error:', err.keyValue);
  const field = `${JSON.stringify(Object.keys(err.keyValue))}`;
  const code = 409;
  res
    .status(code)
    .send({ messages: `A document with that ${field} already exists.`, fields: field });
};

// handle validation errors
const handleValidationError = (err, res) => {
  logger.error('Validation error:', err.errors);
  let errors = Object.values(err.errors).map((el) => el.message);
  let fields = `${JSON.stringify(Object.values(err.errors).map((el) => el.path))}`;
  let code = 400;
  if (errors.length > 1) {
    errors = errors.join(' ');
    res.status(code).send({ messages: `${JSON.stringify(errors)}`, fields: fields });
  } else {
    res.status(code).send({ messages: `${JSON.stringify(errors)}`, fields: fields });
  }
};

module.exports = (err, _req, res, _next) => {
  try {
    if (err.name === 'ValidationError') {
      return handleValidationError(err, res);
    }
    if (err.code && err.code == 11000) {
      return handleDuplicateKeyError(err, res);
    }
    // Special handling for errors like SyntaxError
    if (err.statusCode && err.body) {
      return res.status(err.statusCode).send(err.body);
    }

    logger.error('ErrorController => error', err);
    return res.status(500).send('An unknown error occurred.');
  } catch (err) {
    logger.error('ErrorController => processing error', err);
    return res.status(500).send('Processing error in ErrorController.');
  }
};

@@ -5,6 +5,7 @@ const { logger } = require('~/config');

/**
 * @param {ServerRequest} req
 * @returns {Promise<TModelsConfig>} The models config.
 */
const getModelsConfig = async (req) => {
  const cache = getLogStores(CacheKeys.CONFIG_STORE);
@@ -364,7 +364,7 @@ const getUserEffectivePermissions = async (req, res) => {
 */
const searchPrincipals = async (req, res) => {
  try {
    const { q: query, limit = 20, type } = req.query;
    const { q: query, limit = 20, types } = req.query;

    if (!query || query.trim().length === 0) {
      return res.status(400).json({
@@ -379,22 +379,34 @@ const searchPrincipals = async (req, res) => {
    }

    const searchLimit = Math.min(Math.max(1, parseInt(limit) || 10), 50);
    const typeFilter = [PrincipalType.USER, PrincipalType.GROUP, PrincipalType.ROLE].includes(type)
      ? type
      : null;

    const localResults = await searchLocalPrincipals(query.trim(), searchLimit, typeFilter);
    let typeFilters = null;
    if (types) {
      const typesArray = Array.isArray(types) ? types : types.split(',');
      const validTypes = typesArray.filter((t) =>
        [PrincipalType.USER, PrincipalType.GROUP, PrincipalType.ROLE].includes(t),
      );
      typeFilters = validTypes.length > 0 ? validTypes : null;
    }

    const localResults = await searchLocalPrincipals(query.trim(), searchLimit, typeFilters);
    let allPrincipals = [...localResults];

    const useEntraId = entraIdPrincipalFeatureEnabled(req.user);

    if (useEntraId && localResults.length < searchLimit) {
      try {
        const graphTypeMap = {
          user: 'users',
          group: 'groups',
          null: 'all',
        };
        let graphType = 'all';
        if (typeFilters && typeFilters.length === 1) {
          const graphTypeMap = {
            [PrincipalType.USER]: 'users',
            [PrincipalType.GROUP]: 'groups',
          };
          const mappedType = graphTypeMap[typeFilters[0]];
          if (mappedType) {
            graphType = mappedType;
          }
        }

        const authHeader = req.headers.authorization;
        const accessToken =
@@ -405,7 +417,7 @@ const searchPrincipals = async (req, res) => {
          accessToken,
          req.user.openidId,
          query.trim(),
          graphTypeMap[typeFilter],
          graphType,
          searchLimit - localResults.length,
        );

@@ -436,21 +448,22 @@ const searchPrincipals = async (req, res) => {
      _searchScore: calculateRelevanceScore(item, query.trim()),
    }));

    allPrincipals = sortPrincipalsByRelevance(scoredResults)
    const finalResults = sortPrincipalsByRelevance(scoredResults)
      .slice(0, searchLimit)
      .map((result) => {
        const { _searchScore, ...resultWithoutScore } = result;
        return resultWithoutScore;
      });

    res.status(200).json({
      query: query.trim(),
      limit: searchLimit,
      type: typeFilter,
      results: allPrincipals,
      count: allPrincipals.length,
      types: typeFilters,
      results: finalResults,
      count: finalResults.length,
      sources: {
        local: allPrincipals.filter((r) => r.source === 'local').length,
        entra: allPrincipals.filter((r) => r.source === 'entra').length,
        local: finalResults.filter((r) => r.source === 'local').length,
        entra: finalResults.filter((r) => r.source === 'entra').length,
      },
    });
  } catch (error) {
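The endpoint's former single `type` filter becomes `types`, accepted either comma-separated or as a repeated query parameter, and invalid entries are dropped rather than rejected. Illustrative requests (the route path is assumed, and the lowercase values assume the `PrincipalType` enum values):

```js
// Comma-separated form:
await fetch('/api/search-principals?q=smith&types=user,group');
// Repeated form; Express exposes this as an array on req.query.types:
await fetch('/api/search-principals?q=smith&types=user&types=group');
// If every entry is invalid, typeFilters stays null and all principal types are searched.
```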
@@ -1,54 +1,23 @@
const { logger } = require('@librechat/data-schemas');
const { CacheKeys, AuthType, Constants } = require('librechat-data-provider');
const { getCustomConfig, getCachedTools } = require('~/server/services/Config');
const { getToolkitKey } = require('~/server/services/ToolService');
const { getMCPManager, getFlowStateManager } = require('~/config');
const { availableTools } = require('~/app/clients/tools');
const { CacheKeys, Constants } = require('librechat-data-provider');
const {
  getToolkitKey,
  checkPluginAuth,
  filterUniquePlugins,
  convertMCPToolToPlugin,
  convertMCPToolsToPlugins,
} = require('@librechat/api');
const {
  getCachedTools,
  setCachedTools,
  mergeUserTools,
  getCustomConfig,
} = require('~/server/services/Config');
const { loadAndFormatTools } = require('~/server/services/ToolService');
const { availableTools, toolkits } = require('~/app/clients/tools');
const { getMCPManager } = require('~/config');
const { getLogStores } = require('~/cache');

/**
 * Filters out duplicate plugins from the list of plugins.
 *
 * @param {TPlugin[]} plugins The list of plugins to filter.
 * @returns {TPlugin[]} The list of plugins with duplicates removed.
 */
const filterUniquePlugins = (plugins) => {
  const seen = new Set();
  return plugins.filter((plugin) => {
    const duplicate = seen.has(plugin.pluginKey);
    seen.add(plugin.pluginKey);
    return !duplicate;
  });
};

/**
 * Determines if a plugin is authenticated by checking if all required authentication fields have non-empty values.
 * Supports alternate authentication fields, allowing validation against multiple possible environment variables.
 *
 * @param {TPlugin} plugin The plugin object containing the authentication configuration.
 * @returns {boolean} True if the plugin is authenticated for all required fields, false otherwise.
 */
const checkPluginAuth = (plugin) => {
  if (!plugin.authConfig || plugin.authConfig.length === 0) {
    return false;
  }

  return plugin.authConfig.every((authFieldObj) => {
    const authFieldOptions = authFieldObj.authField.split('||');
    let isFieldAuthenticated = false;

    for (const fieldOption of authFieldOptions) {
      const envValue = process.env[fieldOption];
      if (envValue && envValue.trim() !== '' && envValue !== AuthType.USER_PROVIDED) {
        isFieldAuthenticated = true;
        break;
      }
    }

    return isFieldAuthenticated;
  });
};

const getAvailablePluginsController = async (req, res) => {
  try {
    const cache = getLogStores(CacheKeys.CONFIG_STORE);
@@ -60,6 +29,7 @@ const getAvailablePluginsController = async (req, res) => {

    /** @type {{ filteredTools: string[], includedTools: string[] }} */
    const { filteredTools = [], includedTools = [] } = req.app.locals;
    /** @type {import('@librechat/api').LCManifestTool[]} */
    const pluginManifest = availableTools;

    const uniquePlugins = filterUniquePlugins(pluginManifest);
@@ -85,45 +55,6 @@ const getAvailablePluginsController = async (req, res) => {
  }
};

function createServerToolsCallback() {
  /**
   * @param {string} serverName
   * @param {TPlugin[] | null} serverTools
   */
  return async function (serverName, serverTools) {
    try {
      const mcpToolsCache = getLogStores(CacheKeys.MCP_TOOLS);
      if (!serverName || !mcpToolsCache) {
        return;
      }
      await mcpToolsCache.set(serverName, serverTools);
      logger.debug(`MCP tools for ${serverName} added to cache.`);
    } catch (error) {
      logger.error('Error retrieving MCP tools from cache:', error);
    }
  };
}

function createGetServerTools() {
  /**
   * Retrieves cached server tools
   * @param {string} serverName
   * @returns {Promise<TPlugin[] | null>}
   */
  return async function (serverName) {
    try {
      const mcpToolsCache = getLogStores(CacheKeys.MCP_TOOLS);
      if (!mcpToolsCache) {
        return null;
      }
      return await mcpToolsCache.get(serverName);
    } catch (error) {
      logger.error('Error retrieving MCP tools from cache:', error);
      return null;
    }
  };
}

/**
 * Retrieves and returns a list of available tools, either from a cache or by reading a plugin manifest file.
 *
@@ -139,37 +70,70 @@ function createGetServerTools() {
const getAvailableTools = async (req, res) => {
  try {
    const userId = req.user?.id;
    if (!userId) {
      logger.warn('[getAvailableTools] User ID not found in request');
      return res.status(401).json({ message: 'Unauthorized' });
    }
    const customConfig = await getCustomConfig();
    const cache = getLogStores(CacheKeys.CONFIG_STORE);
    const cachedToolsArray = await cache.get(CacheKeys.TOOLS);
    const cachedUserTools = await getCachedTools({ userId });
    const userPlugins = convertMCPToolsToPlugins(cachedUserTools, customConfig);
    const userPlugins =
      cachedUserTools != null
        ? convertMCPToolsToPlugins({ functionTools: cachedUserTools, customConfig })
        : undefined;

    if (cachedToolsArray && userPlugins) {
    if (cachedToolsArray != null && userPlugins != null) {
      const dedupedTools = filterUniquePlugins([...userPlugins, ...cachedToolsArray]);
      res.status(200).json(dedupedTools);
      return;
    }

    // If not in cache, build from manifest
    let pluginManifest = availableTools;
    if (customConfig?.mcpServers != null) {
      const mcpManager = getMCPManager();
      const flowsCache = getLogStores(CacheKeys.FLOWS);
      const flowManager = flowsCache ? getFlowStateManager(flowsCache) : null;
      const serverToolsCallback = createServerToolsCallback();
      const getServerTools = createGetServerTools();
      const mcpTools = await mcpManager.loadManifestTools({
        flowManager,
        serverToolsCallback,
        getServerTools,
    /** @type {Record<string, FunctionTool> | null} Get tool definitions to filter which tools are actually available */
    let toolDefinitions = await getCachedTools({ includeGlobal: true });
    let prelimCachedTools;

    // TODO: this is a temp fix until app config is refactored
    if (!toolDefinitions) {
      toolDefinitions = loadAndFormatTools({
        adminFilter: req.app.locals?.filteredTools,
        adminIncluded: req.app.locals?.includedTools,
        directory: req.app.locals?.paths.structuredTools,
      });
      pluginManifest = [...mcpTools, ...pluginManifest];
      prelimCachedTools = toolDefinitions;
    }

    /** @type {TPlugin[]} */
    const uniquePlugins = filterUniquePlugins(pluginManifest);
    /** @type {import('@librechat/api').LCManifestTool[]} */
    let pluginManifest = availableTools;
    if (customConfig?.mcpServers != null) {
      try {
        const mcpManager = getMCPManager();
        const mcpTools = await mcpManager.getAllToolFunctions(userId);
        prelimCachedTools = prelimCachedTools ?? {};
        for (const [toolKey, toolData] of Object.entries(mcpTools)) {
          const plugin = convertMCPToolToPlugin({
            toolKey,
            toolData,
            customConfig,
          });
          if (plugin) {
            pluginManifest.push(plugin);
          }
          prelimCachedTools[toolKey] = toolData;
        }
        await mergeUserTools({ userId, cachedUserTools, userTools: prelimCachedTools });
      } catch (error) {
        logger.error(
          '[getAvailableTools] Error loading MCP Tools, servers may still be initializing:',
          error,
        );
      }
    } else if (prelimCachedTools != null) {
      await setCachedTools(prelimCachedTools, { isGlobal: true });
    }

    /** @type {TPlugin[]} Deduplicate and authenticate plugins */
    const uniquePlugins = filterUniquePlugins(pluginManifest);
    const authenticatedPlugins = uniquePlugins.map((plugin) => {
      if (checkPluginAuth(plugin)) {
        return { ...plugin, authenticated: true };
@@ -178,14 +142,15 @@ const getAvailableTools = async (req, res) => {
      }
    });

    const toolDefinitions = (await getCachedTools({ includeGlobal: true })) || {};

    /** Filter plugins based on availability and add MCP-specific auth config */
    const toolsOutput = [];
    for (const plugin of authenticatedPlugins) {
      const isToolDefined = toolDefinitions[plugin.pluginKey] !== undefined;
      const isToolkit =
        plugin.toolkit === true &&
        Object.keys(toolDefinitions).some((key) => getToolkitKey(key) === plugin.pluginKey);
        Object.keys(toolDefinitions).some(
          (key) => getToolkitKey({ toolkits, toolName: key }) === plugin.pluginKey,
        );

      if (!isToolDefined && !isToolkit) {
        continue;
@@ -193,41 +158,36 @@ const getAvailableTools = async (req, res) => {

      const toolToAdd = { ...plugin };

      if (!plugin.pluginKey.includes(Constants.mcp_delimiter)) {
        toolsOutput.push(toolToAdd);
        continue;
      }
      if (plugin.pluginKey.includes(Constants.mcp_delimiter)) {
        const parts = plugin.pluginKey.split(Constants.mcp_delimiter);
        const serverName = parts[parts.length - 1];
        const serverConfig = customConfig?.mcpServers?.[serverName];

      const parts = plugin.pluginKey.split(Constants.mcp_delimiter);
      const serverName = parts[parts.length - 1];
      const serverConfig = customConfig?.mcpServers?.[serverName];

      if (!serverConfig?.customUserVars) {
        toolsOutput.push(toolToAdd);
        continue;
      }

      const customVarKeys = Object.keys(serverConfig.customUserVars);

      if (customVarKeys.length === 0) {
        toolToAdd.authConfig = [];
        toolToAdd.authenticated = true;
      } else {
        toolToAdd.authConfig = Object.entries(serverConfig.customUserVars).map(([key, value]) => ({
          authField: key,
          label: value.title || key,
          description: value.description || '',
        }));
        toolToAdd.authenticated = false;
        if (serverConfig?.customUserVars) {
          const customVarKeys = Object.keys(serverConfig.customUserVars);
          if (customVarKeys.length === 0) {
            toolToAdd.authConfig = [];
            toolToAdd.authenticated = true;
          } else {
            toolToAdd.authConfig = Object.entries(serverConfig.customUserVars).map(
              ([key, value]) => ({
                authField: key,
                label: value.title || key,
                description: value.description || '',
              }),
            );
            toolToAdd.authenticated = false;
          }
        }
      }

      toolsOutput.push(toolToAdd);
    }

    const finalTools = filterUniquePlugins(toolsOutput);
    await cache.set(CacheKeys.TOOLS, finalTools);

    const dedupedTools = filterUniquePlugins([...userPlugins, ...finalTools]);

    const dedupedTools = filterUniquePlugins([...(userPlugins ?? []), ...finalTools]);
    res.status(200).json(dedupedTools);
  } catch (error) {
    logger.error('[getAvailableTools]', error);
@@ -235,58 +195,6 @@ const getAvailableTools = async (req, res) => {
  }
};

/**
 * Converts MCP function format tools to plugin format
 * @param {Object} functionTools - Object with function format tools
 * @param {Object} customConfig - Custom configuration for MCP servers
 * @returns {Array} Array of plugin objects
 */
function convertMCPToolsToPlugins(functionTools, customConfig) {
  const plugins = [];

  for (const [toolKey, toolData] of Object.entries(functionTools)) {
    if (!toolData.function || !toolKey.includes(Constants.mcp_delimiter)) {
      continue;
    }

    const functionData = toolData.function;
    const parts = toolKey.split(Constants.mcp_delimiter);
    const serverName = parts[parts.length - 1];

    const serverConfig = customConfig?.mcpServers?.[serverName];

    const plugin = {
      name: parts[0], // Use the tool name without server suffix
      pluginKey: toolKey,
      description: functionData.description || '',
      authenticated: true,
      icon: serverConfig?.iconPath,
    };

    // Build authConfig for MCP tools
    if (!serverConfig?.customUserVars) {
      plugin.authConfig = [];
      plugins.push(plugin);
      continue;
    }

    const customVarKeys = Object.keys(serverConfig.customUserVars);
    if (customVarKeys.length === 0) {
      plugin.authConfig = [];
    } else {
      plugin.authConfig = Object.entries(serverConfig.customUserVars).map(([key, value]) => ({
        authField: key,
        label: value.title || key,
        description: value.description || '',
      }));
    }

    plugins.push(plugin);
  }

  return plugins;
}

module.exports = {
  getAvailableTools,
  getAvailablePluginsController,
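MCP tools are cached under composite keys of the form `<toolName><mcp_delimiter><serverName>`, which is why both the controller and the conversion helpers split on `Constants.mcp_delimiter` and take the last segment as the server name. A small sketch of that convention (the delimiter value comes from librechat-data-provider):

```js
const { Constants } = require('librechat-data-provider');

// Sketch: derive tool and server names from a composite MCP plugin key.
function parseMCPPluginKey(pluginKey) {
  const parts = pluginKey.split(Constants.mcp_delimiter);
  return {
    toolName: parts[0], // name without the server suffix
    serverName: parts[parts.length - 1],
    isMCP: parts.length > 1,
  };
}
```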
@@ -13,61 +13,311 @@ jest.mock('@librechat/data-schemas', () => ({
jest.mock('~/server/services/Config', () => ({
  getCustomConfig: jest.fn(),
  getCachedTools: jest.fn(),
  setCachedTools: jest.fn(),
  mergeUserTools: jest.fn(),
}));

jest.mock('~/server/services/ToolService', () => ({
  getToolkitKey: jest.fn(),
  loadAndFormatTools: jest.fn(),
}));

jest.mock('~/config', () => ({
  getMCPManager: jest.fn(() => ({
    loadManifestTools: jest.fn().mockResolvedValue([]),
    loadAllManifestTools: jest.fn().mockResolvedValue([]),
  })),
  getFlowStateManager: jest.fn(),
}));

jest.mock('~/app/clients/tools', () => ({
  availableTools: [],
  toolkits: [],
}));

jest.mock('~/cache', () => ({
  getLogStores: jest.fn(),
}));

// Import the actual module with the function we want to test
const { getAvailableTools } = require('./PluginController');
const { getAvailableTools, getAvailablePluginsController } = require('./PluginController');
const { loadAndFormatTools } = require('~/server/services/ToolService');

describe('PluginController', () => {
  describe('plugin.icon behavior', () => {
    let mockReq, mockRes, mockCache;
  let mockReq, mockRes, mockCache;

  beforeEach(() => {
    jest.clearAllMocks();
    mockReq = {
      user: { id: 'test-user-id' },
      app: {
        locals: {
          paths: { structuredTools: '/mock/path' },
          filteredTools: null,
          includedTools: null,
        },
      },
    };
    mockRes = { status: jest.fn().mockReturnThis(), json: jest.fn() };
    mockCache = { get: jest.fn(), set: jest.fn() };
    getLogStores.mockReturnValue(mockCache);

    // Clear availableTools and toolkits arrays before each test
    require('~/app/clients/tools').availableTools.length = 0;
    require('~/app/clients/tools').toolkits.length = 0;
  });

  describe('getAvailablePluginsController', () => {
    beforeEach(() => {
      mockReq.app = { locals: { filteredTools: [], includedTools: [] } };
    });

    it('should use filterUniquePlugins to remove duplicate plugins', async () => {
      // Add plugins with duplicates to availableTools
      const mockPlugins = [
        { name: 'Plugin1', pluginKey: 'key1', description: 'First' },
        { name: 'Plugin1', pluginKey: 'key1', description: 'First duplicate' },
        { name: 'Plugin2', pluginKey: 'key2', description: 'Second' },
      ];

      require('~/app/clients/tools').availableTools.push(...mockPlugins);

      mockCache.get.mockResolvedValue(null);

      await getAvailablePluginsController(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(200);
      const responseData = mockRes.json.mock.calls[0][0];
      expect(responseData).toHaveLength(2);
      expect(responseData[0].pluginKey).toBe('key1');
      expect(responseData[1].pluginKey).toBe('key2');
    });

    it('should use checkPluginAuth to verify plugin authentication', async () => {
      // checkPluginAuth returns false for plugins without authConfig
      // so authenticated property won't be added
      const mockPlugin = { name: 'Plugin1', pluginKey: 'key1', description: 'First' };

      require('~/app/clients/tools').availableTools.push(mockPlugin);
      mockCache.get.mockResolvedValue(null);

      await getAvailablePluginsController(mockReq, mockRes);

      const responseData = mockRes.json.mock.calls[0][0];
      // checkPluginAuth returns false, so authenticated property is not added
      expect(responseData[0].authenticated).toBeUndefined();
    });

    it('should return cached plugins when available', async () => {
      const cachedPlugins = [
        { name: 'CachedPlugin', pluginKey: 'cached', description: 'Cached plugin' },
      ];

      mockCache.get.mockResolvedValue(cachedPlugins);

      await getAvailablePluginsController(mockReq, mockRes);

      // When cache is hit, we return immediately without processing
      expect(mockRes.json).toHaveBeenCalledWith(cachedPlugins);
    });

    it('should filter plugins based on includedTools', async () => {
      const mockPlugins = [
        { name: 'Plugin1', pluginKey: 'key1', description: 'First' },
        { name: 'Plugin2', pluginKey: 'key2', description: 'Second' },
      ];

      require('~/app/clients/tools').availableTools.push(...mockPlugins);
      mockReq.app.locals.includedTools = ['key1'];
      mockCache.get.mockResolvedValue(null);

      await getAvailablePluginsController(mockReq, mockRes);

      const responseData = mockRes.json.mock.calls[0][0];
      expect(responseData).toHaveLength(1);
      expect(responseData[0].pluginKey).toBe('key1');
    });
  });

  describe('getAvailableTools', () => {
    it('should use convertMCPToolsToPlugins for user-specific MCP tools', async () => {
      const mockUserTools = {
        [`tool1${Constants.mcp_delimiter}server1`]: {
          type: 'function',
          function: {
            name: `tool1${Constants.mcp_delimiter}server1`,
            description: 'Tool 1',
            parameters: { type: 'object', properties: {} },
          },
        },
      };

      mockCache.get.mockResolvedValue(null);
      getCachedTools.mockResolvedValueOnce(mockUserTools);
      getCustomConfig.mockResolvedValue(null);

      // Mock second call to return tool definitions
      getCachedTools.mockResolvedValueOnce(mockUserTools);

      await getAvailableTools(mockReq, mockRes);

      const responseData = mockRes.json.mock.calls[0][0];
      // convertMCPToolsToPlugins should have converted the tool
      expect(responseData.length).toBeGreaterThan(0);
      const convertedTool = responseData.find(
        (tool) => tool.pluginKey === `tool1${Constants.mcp_delimiter}server1`,
      );
      expect(convertedTool).toBeDefined();
      expect(convertedTool.name).toBe('tool1');
    });

    it('should use filterUniquePlugins to deduplicate combined tools', async () => {
      const mockUserTools = {
        'user-tool': {
          type: 'function',
          function: {
            name: 'user-tool',
            description: 'User tool',
            parameters: { type: 'object', properties: {} },
          },
        },
      };

      const mockCachedPlugins = [
        { name: 'user-tool', pluginKey: 'user-tool', description: 'Duplicate user tool' },
        { name: 'ManifestTool', pluginKey: 'manifest-tool', description: 'Manifest tool' },
      ];

      mockCache.get.mockResolvedValue(mockCachedPlugins);
      getCachedTools.mockResolvedValueOnce(mockUserTools);
      getCustomConfig.mockResolvedValue(null);

      // Mock second call to return tool definitions
      getCachedTools.mockResolvedValueOnce(mockUserTools);

      await getAvailableTools(mockReq, mockRes);

      const responseData = mockRes.json.mock.calls[0][0];
      // Should have deduplicated tools with same pluginKey
      const userToolCount = responseData.filter((tool) => tool.pluginKey === 'user-tool').length;
      expect(userToolCount).toBe(1);
    });

    it('should use checkPluginAuth to verify authentication status', async () => {
      // Add a plugin to availableTools that will be checked
      const mockPlugin = {
        name: 'Tool1',
        pluginKey: 'tool1',
        description: 'Tool 1',
        // No authConfig means checkPluginAuth returns false
      };

      require('~/app/clients/tools').availableTools.push(mockPlugin);

      mockCache.get.mockResolvedValue(null);
      getCachedTools.mockResolvedValue(null);
      getCustomConfig.mockResolvedValue(null);

      // Mock loadAndFormatTools to return tool definitions including our tool
      loadAndFormatTools.mockReturnValue({
        tool1: {
          type: 'function',
          function: {
            name: 'tool1',
            description: 'Tool 1',
            parameters: {},
          },
        },
      });

      await getAvailableTools(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(200);
      const responseData = mockRes.json.mock.calls[0][0];
      expect(Array.isArray(responseData)).toBe(true);
      const tool = responseData.find((t) => t.pluginKey === 'tool1');
      expect(tool).toBeDefined();
      // checkPluginAuth returns false, so authenticated property is not added
      expect(tool.authenticated).toBeUndefined();
    });

    it('should use getToolkitKey for toolkit validation', async () => {
      const mockToolkit = {
        name: 'Toolkit1',
        pluginKey: 'toolkit1',
        description: 'Toolkit 1',
        toolkit: true,
      };

      require('~/app/clients/tools').availableTools.push(mockToolkit);

      // Mock toolkits to have a mapping
      require('~/app/clients/tools').toolkits.push({
        name: 'Toolkit1',
        pluginKey: 'toolkit1',
        tools: ['toolkit1_function'],
      });

      mockCache.get.mockResolvedValue(null);
      getCachedTools.mockResolvedValue(null);
      getCustomConfig.mockResolvedValue(null);

      // Mock loadAndFormatTools to return tool definitions
      loadAndFormatTools.mockReturnValue({
        toolkit1_function: {
          type: 'function',
          function: {
            name: 'toolkit1_function',
            description: 'Toolkit function',
            parameters: {},
          },
        },
      });

      await getAvailableTools(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(200);
      const responseData = mockRes.json.mock.calls[0][0];
      expect(Array.isArray(responseData)).toBe(true);
      const toolkit = responseData.find((t) => t.pluginKey === 'toolkit1');
      expect(toolkit).toBeDefined();
    });
  });

  describe('plugin.icon behavior', () => {
    const callGetAvailableToolsWithMCPServer = async (mcpServers) => {
      mockCache.get.mockResolvedValue(null);
      getCustomConfig.mockResolvedValue({ mcpServers });

      const functionTools = {
        [`test-tool${Constants.mcp_delimiter}test-server`]: {
          function: { name: 'test-tool', description: 'A test tool' },
          type: 'function',
          function: {
            name: `test-tool${Constants.mcp_delimiter}test-server`,
            description: 'A test tool',
            parameters: { type: 'object', properties: {} },
          },
        },
      };

      // Mock the MCP manager to return tools
      const mockMCPManager = {
        getAllToolFunctions: jest.fn().mockResolvedValue(functionTools),
      };
      require('~/config').getMCPManager.mockReturnValue(mockMCPManager);

      getCachedTools.mockResolvedValueOnce({});

      // Mock loadAndFormatTools to return empty object since these are MCP tools
      loadAndFormatTools.mockReturnValue({});

      getCachedTools.mockResolvedValueOnce(functionTools);
      getCachedTools.mockResolvedValueOnce({
        [`test-tool${Constants.mcp_delimiter}test-server`]: true,
      });

      await getAvailableTools(mockReq, mockRes);
      const responseData = mockRes.json.mock.calls[0][0];
      return responseData.find((tool) => tool.name === 'test-tool');
      return responseData.find(
        (tool) => tool.pluginKey === `test-tool${Constants.mcp_delimiter}test-server`,
      );
    };

    beforeEach(() => {
      jest.clearAllMocks();
      mockReq = { user: { id: 'test-user-id' } };
      mockRes = { status: jest.fn().mockReturnThis(), json: jest.fn() };
      mockCache = { get: jest.fn(), set: jest.fn() };
      getLogStores.mockReturnValue(mockCache);
    });

    it('should set plugin.icon when iconPath is defined', async () => {
      const mcpServers = {
        'test-server': {
@@ -86,4 +336,228 @@ describe('PluginController', () => {
      expect(testTool.icon).toBeUndefined();
    });
  });

  describe('helper function integration', () => {
    it('should properly handle MCP tools with custom user variables', async () => {
      const customConfig = {
        mcpServers: {
          'test-server': {
            customUserVars: {
              API_KEY: { title: 'API Key', description: 'Your API key' },
            },
          },
        },
      };

      // Mock MCP tools returned by getAllToolFunctions
      const mcpToolFunctions = {
        [`tool1${Constants.mcp_delimiter}test-server`]: {
          type: 'function',
          function: {
            name: `tool1${Constants.mcp_delimiter}test-server`,
            description: 'Tool 1',
            parameters: {},
          },
        },
      };

      // Mock the MCP manager to return tools
      const mockMCPManager = {
        getAllToolFunctions: jest.fn().mockResolvedValue(mcpToolFunctions),
      };
      require('~/config').getMCPManager.mockReturnValue(mockMCPManager);

      mockCache.get.mockResolvedValue(null);
      getCustomConfig.mockResolvedValue(customConfig);

      // First call returns user tools (empty in this case)
      getCachedTools.mockResolvedValueOnce({});

      // Mock loadAndFormatTools to return empty object for MCP tools
      loadAndFormatTools.mockReturnValue({});

      // Second call returns tool definitions including our MCP tool
      getCachedTools.mockResolvedValueOnce(mcpToolFunctions);

      await getAvailableTools(mockReq, mockRes);

      const responseData = mockRes.json.mock.calls[0][0];

      // Find the MCP tool in the response
      const mcpTool = responseData.find(
        (tool) => tool.pluginKey === `tool1${Constants.mcp_delimiter}test-server`,
      );

      // The actual implementation adds authConfig and sets authenticated to false when customUserVars exist
      expect(mcpTool).toBeDefined();
      expect(mcpTool.authConfig).toEqual([
        { authField: 'API_KEY', label: 'API Key', description: 'Your API key' },
      ]);
      expect(mcpTool.authenticated).toBe(false);
    });

    it('should handle error cases gracefully', async () => {
      mockCache.get.mockRejectedValue(new Error('Cache error'));

      await getAvailableTools(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(500);
      expect(mockRes.json).toHaveBeenCalledWith({ message: 'Cache error' });
    });
  });

  describe('edge cases with undefined/null values', () => {
    it('should handle undefined cache gracefully', async () => {
      getLogStores.mockReturnValue(undefined);

      await getAvailableTools(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(500);
    });

    it('should handle null cachedTools and cachedUserTools', async () => {
      mockCache.get.mockResolvedValue(null);
      getCachedTools.mockResolvedValue(null);
      getCustomConfig.mockResolvedValue(null);

      // Mock loadAndFormatTools to return empty object when getCachedTools returns null
      loadAndFormatTools.mockReturnValue({});

      await getAvailableTools(mockReq, mockRes);

      // Should handle null values gracefully
      expect(mockRes.status).toHaveBeenCalledWith(200);
    });

    it('should handle when getCachedTools returns undefined', async () => {
      mockCache.get.mockResolvedValue(null);
      getCustomConfig.mockResolvedValue(null);

      // Mock loadAndFormatTools to return empty object when getCachedTools returns undefined
      loadAndFormatTools.mockReturnValue({});

      // Mock getCachedTools to return undefined for both calls
      getCachedTools.mockReset();
      getCachedTools.mockResolvedValueOnce(undefined).mockResolvedValueOnce(undefined);

      await getAvailableTools(mockReq, mockRes);

      // Should handle undefined values gracefully
      expect(mockRes.status).toHaveBeenCalledWith(200);
    });

    it('should handle cachedToolsArray and userPlugins both being defined', async () => {
      const cachedTools = [{ name: 'CachedTool', pluginKey: 'cached-tool', description: 'Cached' }];
      // Use MCP delimiter for the user tool so convertMCPToolsToPlugins works
      const userTools = {
        [`user-tool${Constants.mcp_delimiter}server1`]: {
          type: 'function',
          function: {
            name: `user-tool${Constants.mcp_delimiter}server1`,
            description: 'User tool',
            parameters: {},
          },
        },
      };

      mockCache.get.mockResolvedValue(cachedTools);
      getCachedTools.mockResolvedValue(userTools);
      getCustomConfig.mockResolvedValue(null);

      await getAvailableTools(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(200);
      const responseData = mockRes.json.mock.calls[0][0];
      // Should have both cached and user tools
      expect(responseData.length).toBeGreaterThanOrEqual(2);
    });

    it('should handle empty toolDefinitions object', async () => {
      mockCache.get.mockResolvedValue(null);
      getCachedTools.mockResolvedValueOnce({}).mockResolvedValueOnce({});
      getCustomConfig.mockResolvedValue(null);

      await getAvailableTools(mockReq, mockRes);

      // With empty tool definitions, no tools should be in the final output
      expect(mockRes.json).toHaveBeenCalledWith([]);
    });

    it('should handle MCP tools without customUserVars', async () => {
      const customConfig = {
        mcpServers: {
          'test-server': {
            // No customUserVars defined
          },
        },
      };

      const mockUserTools = {
        [`tool1${Constants.mcp_delimiter}test-server`]: {
          function: { name: 'tool1', description: 'Tool 1' },
        },
      };

      mockCache.get.mockResolvedValue(null);
      getCustomConfig.mockResolvedValue(customConfig);
      getCachedTools.mockResolvedValueOnce(mockUserTools);

      getCachedTools.mockResolvedValueOnce({
        [`tool1${Constants.mcp_delimiter}test-server`]: true,
      });

      await getAvailableTools(mockReq, mockRes);

      const responseData = mockRes.json.mock.calls[0][0];
      expect(responseData[0].authenticated).toBe(true);
      // The actual implementation doesn't set authConfig on tools without customUserVars
      expect(responseData[0].authConfig).toEqual([]);
    });

    it('should handle req.app.locals with undefined filteredTools and includedTools', async () => {
      mockReq.app = { locals: {} };
      mockCache.get.mockResolvedValue(null);

      await getAvailablePluginsController(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(200);
      expect(mockRes.json).toHaveBeenCalledWith([]);
    });

    it('should handle toolkit with undefined toolDefinitions keys', async () => {
      const mockToolkit = {
        name: 'Toolkit1',
        pluginKey: 'toolkit1',
        description: 'Toolkit 1',
        toolkit: true,
      };

      // Ensure req.app.locals is properly mocked
      mockReq.app = {
        locals: {
          filteredTools: [],
          includedTools: [],
          paths: { structuredTools: '/mock/path' },
        },
      };

      // Add the toolkit to availableTools
      require('~/app/clients/tools').availableTools.push(mockToolkit);

      mockCache.get.mockResolvedValue(null);
      getCachedTools.mockResolvedValue({});
      getCustomConfig.mockResolvedValue(null);

      // Mock loadAndFormatTools to return an empty object when toolDefinitions is null
      loadAndFormatTools.mockReturnValue({});

      // Mock getCachedTools second call to return null
      getCachedTools.mockResolvedValueOnce({}).mockResolvedValueOnce(null);

      await getAvailableTools(mockReq, mockRes);

      // Should handle null toolDefinitions gracefully
      expect(mockRes.status).toHaveBeenCalledWith(200);
    });
  });
});
@@ -47,7 +47,7 @@ const verify2FA = async (req, res) => {
  try {
    const userId = req.user.id;
    const { token, backupCode } = req.body;
    const user = await getUserById(userId);
    const user = await getUserById(userId, '_id totpSecret backupCodes');

    if (!user || !user.totpSecret) {
      return res.status(400).json({ message: '2FA not initiated' });
@@ -79,7 +79,7 @@ const confirm2FA = async (req, res) => {
  try {
    const userId = req.user.id;
    const { token } = req.body;
    const user = await getUserById(userId);
    const user = await getUserById(userId, '_id totpSecret');

    if (!user || !user.totpSecret) {
      return res.status(400).json({ message: '2FA not initiated' });
@@ -99,10 +99,36 @@ const confirm2FA = async (req, res) => {

/**
 * Disable 2FA by clearing the stored secret and backup codes.
 * Requires verification with either TOTP token or backup code if 2FA is fully enabled.
 */
const disable2FA = async (req, res) => {
  try {
    const userId = req.user.id;
    const { token, backupCode } = req.body;
    const user = await getUserById(userId, '_id totpSecret backupCodes');

    if (!user || !user.totpSecret) {
      return res.status(400).json({ message: '2FA is not setup for this user' });
    }

    if (user.twoFactorEnabled) {
      const secret = await getTOTPSecret(user.totpSecret);
      let isVerified = false;

      if (token) {
        isVerified = await verifyTOTP(secret, token);
      } else if (backupCode) {
        isVerified = await verifyBackupCode({ user, backupCode });
      } else {
        return res
          .status(400)
          .json({ message: 'Either token or backup code is required to disable 2FA' });
      }

      if (!isVerified) {
        return res.status(401).json({ message: 'Invalid token or backup code' });
      }
    }
    await updateUser(userId, { totpSecret: null, backupCodes: [], twoFactorEnabled: false });
    return res.status(200).json();
  } catch (err) {
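Each handler now passes an explicit field selection to `getUserById`, so secrets like `totpSecret` and `backupCodes` are loaded only where the flow genuinely needs them, and excluded elsewhere (as in the refresh controller's `-password -__v -totpSecret -backupCodes`). A sketch of the two Mongoose projection styles involved (the `User` model name is assumed):

```js
// Inclusion projection: fetch only what 2FA verification needs.
const user = await User.findById(userId).select('_id totpSecret backupCodes').lean();

// Exclusion projection: everything except sensitive fields, e.g. for token refresh.
const safeUser = await User.findById(userId)
  .select('-password -__v -totpSecret -backupCodes')
  .lean();
```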
@@ -1,5 +1,5 @@
const { logger } = require('@librechat/data-schemas');
const { webSearchKeys, extractWebSearchEnvVars } = require('@librechat/api');
const { webSearchKeys, extractWebSearchEnvVars, normalizeHttpError } = require('@librechat/api');
const {
  getFiles,
  updateUser,
@@ -24,7 +24,13 @@ const { getMCPManager } = require('~/config');
const getUserController = async (req, res) => {
  /** @type {MongoUser} */
  const userData = req.user.toObject != null ? req.user.toObject() : { ...req.user };
  /**
   * These fields should not exist due to secure field selection, but deletion
   * is done in case of alternate database incompatibility with Mongo API
   */
  delete userData.password;
  delete userData.totpSecret;
  delete userData.backupCodes;
  if (req.app.locals.fileStrategy === FileSources.s3 && userData.avatar) {
    const avatarNeedsRefresh = needsRefresh(userData.avatar, 3600);
    if (!avatarNeedsRefresh) {
@@ -89,8 +95,8 @@ const updateUserPluginsController = async (req, res) => {

    if (userPluginsService instanceof Error) {
      logger.error('[userPluginsService]', userPluginsService);
      const { status, message } = userPluginsService;
      res.status(status).send({ message });
      const { status, message } = normalizeHttpError(userPluginsService);
      return res.status(status).send({ message });
    }
  }

@@ -137,7 +143,7 @@ const updateUserPluginsController = async (req, res) => {
        authService = await updateUserPluginAuth(user.id, keys[i], pluginKey, values[i]);
        if (authService instanceof Error) {
          logger.error('[authService]', authService);
          ({ status, message } = authService);
          ({ status, message } = normalizeHttpError(authService));
        }
      }
    } else if (action === 'uninstall') {
@@ -151,7 +157,7 @@ const updateUserPluginsController = async (req, res) => {
            `[authService] Error deleting all auth for MCP tool ${pluginKey}:`,
            authService,
          );
          ({ status, message } = authService);
          ({ status, message } = normalizeHttpError(authService));
        }
      } else {
        // This handles:
@@ -163,7 +169,7 @@ const updateUserPluginsController = async (req, res) => {
          authService = await deleteUserPluginAuth(user.id, keys[i]); // Deletes by authField name
          if (authService instanceof Error) {
            logger.error('[authService] Error deleting specific auth key:', authService);
            ({ status, message } = authService);
            ({ status, message } = normalizeHttpError(authService));
          }
        }
      }
@@ -193,7 +199,8 @@ const updateUserPluginsController = async (req, res) => {
      return res.status(status).send();
    }

    res.status(status).send({ message });
    const normalized = normalizeHttpError({ status, message });
    return res.status(normalized.status).send({ message: normalized.message });
  } catch (err) {
    logger.error('[updateUserPluginsController]', err);
    return res.status(500).json({ message: 'Something went wrong.' });
||||
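Note on `normalizeHttpError`: the hunks above route service errors through this helper before responding, but its implementation lives in `@librechat/api` and is not part of this diff. A minimal sketch of what such a normalizer plausibly does — this is an assumption, not the shipped code:

// Hypothetical sketch; the real normalizeHttpError in @librechat/api may differ.
function normalizeHttpError(error, fallbackStatus = 500) {
  // Collapse missing or non-HTTP status values to a server error.
  const status =
    Number.isInteger(error?.status) && error.status >= 400 && error.status < 600
      ? error.status
      : fallbackStatus;
  const message =
    typeof error?.message === 'string' && error.message.length > 0
      ? error.message
      : 'Something went wrong.';
  return { status, message };
}

This guards the `res.status(status)` calls above against `undefined` or non-numeric status values, which would otherwise throw inside Express.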
@@ -33,18 +33,13 @@ const {
   bedrockInputSchema,
   removeNullishValues,
 } = require('librechat-data-provider');
-const {
-  findPluginAuthsByKeys,
-  getFormattedMemories,
-  deleteMemory,
-  setMemory,
-} = require('~/models');
-const { getMCPAuthMap, checkCapability, hasCustomUserVars } = require('~/server/services/Config');
 const { addCacheControl, createContextHandlers } = require('~/app/clients/prompts');
 const { initializeAgent } = require('~/server/services/Endpoints/agents/agent');
 const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
+const { getFormattedMemories, deleteMemory, setMemory } = require('~/models');
 const { encodeAndFormat } = require('~/server/services/Files/images/encode');
 const { getProviderConfig } = require('~/server/services/Endpoints');
+const { checkCapability } = require('~/server/services/Config');
 const BaseClient = require('~/app/clients/BaseClient');
 const { getRoleByName } = require('~/models/Role');
 const { loadAgent } = require('~/models/Agent');
@@ -402,6 +397,34 @@ class AgentClient extends BaseClient {
     return result;
   }

+  /**
+   * Creates a promise that resolves with the memory promise result or undefined after a timeout
+   * @param {Promise<(TAttachment | null)[] | undefined>} memoryPromise - The memory promise to await
+   * @param {number} timeoutMs - Timeout in milliseconds (default: 3000)
+   * @returns {Promise<(TAttachment | null)[] | undefined>}
+   */
+  async awaitMemoryWithTimeout(memoryPromise, timeoutMs = 3000) {
+    if (!memoryPromise) {
+      return;
+    }
+
+    try {
+      const timeoutPromise = new Promise((_, reject) =>
+        setTimeout(() => reject(new Error('Memory processing timeout')), timeoutMs),
+      );
+
+      const attachments = await Promise.race([memoryPromise, timeoutPromise]);
+      return attachments;
+    } catch (error) {
+      if (error.message === 'Memory processing timeout') {
+        logger.warn('[AgentClient] Memory processing timed out after 3 seconds');
+      } else {
+        logger.error('[AgentClient] Error processing memory:', error);
+      }
+      return;
+    }
+  }
+
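`awaitMemoryWithTimeout` is a standard `Promise.race` timeout guard. A standalone sketch of the same pattern under hypothetical names, for reuse outside the class:

// Generic sketch of the Promise.race timeout pattern used above (hypothetical helper).
async function raceWithTimeout(promise, timeoutMs, label = 'operation') {
  let timer;
  const timeout = new Promise((_, reject) => {
    timer = setTimeout(() => reject(new Error(`${label} timeout`)), timeoutMs);
  });
  try {
    // Whichever promise settles first wins; the loser keeps running but its result is ignored.
    return await Promise.race([promise, timeout]);
  } catch {
    return undefined; // swallow timeouts and failures, mirroring awaitMemoryWithTimeout
  } finally {
    clearTimeout(timer); // unlike the method above, clear the timer so it cannot fire later
  }
}

One caveat worth noting: racing does not cancel the losing promise, so the memory work continues in the background even after the cutoff.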
  /**
   * @returns {Promise<string | undefined>}
   */
@@ -587,6 +610,7 @@ class AgentClient extends BaseClient {
       await this.chatCompletion({
         payload,
         onProgress: opts.onProgress,
+        userMCPAuthMap: opts.userMCPAuthMap,
         abortController: opts.abortController,
       });
       return this.contentParts;
@@ -719,7 +743,13 @@ class AgentClient extends BaseClient {
     return currentMessageTokens > 0 ? currentMessageTokens : originalEstimate;
   }

-  async chatCompletion({ payload, abortController = null }) {
+  /**
+   * @param {object} params
+   * @param {string | ChatCompletionMessageParam[]} params.payload
+   * @param {Record<string, Record<string, string>>} [params.userMCPAuthMap]
+   * @param {AbortController} [params.abortController]
+   */
+  async chatCompletion({ payload, userMCPAuthMap, abortController = null }) {
     /** @type {Partial<GraphRunnableConfig>} */
     let config;
     /** @type {ReturnType<createRun>} */
@@ -740,6 +770,11 @@ class AgentClient extends BaseClient {
           last_agent_index: this.agentConfigs?.size ?? 0,
           user_id: this.user ?? this.options.req.user?.id,
           hide_sequential_outputs: this.options.agent.hide_sequential_outputs,
+          requestBody: {
+            messageId: this.responseMessageId,
+            conversationId: this.conversationId,
+            parentMessageId: this.parentMessageId,
+          },
           user: this.options.req.user,
         },
         recursionLimit: agentsEConfig?.recursionLimit ?? 25,
@@ -870,21 +905,9 @@ class AgentClient extends BaseClient {
           run.Graph.contentData = contentData;
         }

-        try {
-          if (await hasCustomUserVars()) {
-            config.configurable.userMCPAuthMap = await getMCPAuthMap({
-              tools: agent.tools,
-              userId: this.options.req.user.id,
-              findPluginAuthsByKeys,
-            });
-          }
-        } catch (err) {
-          logger.error(
-            `[api/server/controllers/agents/client.js #chatCompletion] Error getting custom user vars for agent ${agent.id}`,
-            err,
-          );
+        if (userMCPAuthMap != null) {
+          config.configurable.userMCPAuthMap = userMCPAuthMap;
         }

         await run.processStream({ messages }, config, {
           keepContent: i !== 0,
           tokenCounter: createTokenCounter(this.getEncoding()),
@@ -1002,11 +1025,9 @@ class AgentClient extends BaseClient {
       });

       try {
-        if (memoryPromise) {
-          const attachments = await memoryPromise;
-          if (attachments && attachments.length > 0) {
-            this.artifactPromises.push(...attachments);
-          }
-        }
+        const attachments = await this.awaitMemoryWithTimeout(memoryPromise);
+        if (attachments && attachments.length > 0) {
+          this.artifactPromises.push(...attachments);
+        }

         await this.recordCollectedUsage({ context: 'message' });
@@ -1017,11 +1038,9 @@ class AgentClient extends BaseClient {
         );
       }
     } catch (err) {
-      if (memoryPromise) {
-        const attachments = await memoryPromise;
-        if (attachments && attachments.length > 0) {
-          this.artifactPromises.push(...attachments);
-        }
-      }
+      const attachments = await this.awaitMemoryWithTimeout(memoryPromise);
+      if (attachments && attachments.length > 0) {
+        this.artifactPromises.push(...attachments);
+      }
       logger.error(
         '[api/server/controllers/agents/client.js #sendCompletion] Operation aborted',
@@ -1056,7 +1075,6 @@ class AgentClient extends BaseClient {

     /** @type {import('@librechat/agents').ClientOptions} */
     let clientOptions = {
-      maxTokens: 75,
       model: agent.model || agent.model_parameters.model,
     };

@@ -1123,12 +1141,15 @@ class AgentClient extends BaseClient {
       clientOptions.configuration = options.configOptions;
     }

-    // Ensure maxTokens is set for non-o1 models
-    if (!/\b(o\d)\b/i.test(clientOptions.model) && !clientOptions.maxTokens) {
-      clientOptions.maxTokens = 75;
-    } else if (/\b(o\d)\b/i.test(clientOptions.model) && clientOptions.maxTokens != null) {
+    if (clientOptions.maxTokens != null) {
       delete clientOptions.maxTokens;
     }
+    if (clientOptions?.modelKwargs?.max_completion_tokens != null) {
+      delete clientOptions.modelKwargs.max_completion_tokens;
+    }
+    if (clientOptions?.modelKwargs?.max_output_tokens != null) {
+      delete clientOptions.modelKwargs.max_output_tokens;
+    }

     clientOptions = Object.assign(
       Object.fromEntries(
@@ -728,6 +728,239 @@ describe('AgentClient - titleConvo', () => {
    });
  });

  describe('getOptions method - GPT-5+ model handling', () => {
    let mockReq;
    let mockRes;
    let mockAgent;
    let mockOptions;

    beforeEach(() => {
      jest.clearAllMocks();

      mockAgent = {
        id: 'agent-123',
        endpoint: EModelEndpoint.openAI,
        provider: EModelEndpoint.openAI,
        model_parameters: {
          model: 'gpt-5',
        },
      };

      mockReq = {
        app: {
          locals: {},
        },
        user: {
          id: 'user-123',
        },
      };

      mockRes = {};

      mockOptions = {
        req: mockReq,
        res: mockRes,
        agent: mockAgent,
      };

      client = new AgentClient(mockOptions);
    });

    it('should move maxTokens to modelKwargs.max_completion_tokens for GPT-5 models', () => {
      const clientOptions = {
        model: 'gpt-5',
        maxTokens: 2048,
        temperature: 0.7,
      };

      // Simulate the getOptions logic that handles GPT-5+ models
      if (/\bgpt-[5-9]\b/i.test(clientOptions.model) && clientOptions.maxTokens != null) {
        clientOptions.modelKwargs = clientOptions.modelKwargs ?? {};
        clientOptions.modelKwargs.max_completion_tokens = clientOptions.maxTokens;
        delete clientOptions.maxTokens;
      }

      expect(clientOptions.maxTokens).toBeUndefined();
      expect(clientOptions.modelKwargs).toBeDefined();
      expect(clientOptions.modelKwargs.max_completion_tokens).toBe(2048);
      expect(clientOptions.temperature).toBe(0.7); // Other options should remain
    });

    it('should move maxTokens to modelKwargs.max_output_tokens for GPT-5 models with useResponsesApi', () => {
      const clientOptions = {
        model: 'gpt-5',
        maxTokens: 2048,
        temperature: 0.7,
        useResponsesApi: true,
      };

      if (/\bgpt-[5-9]\b/i.test(clientOptions.model) && clientOptions.maxTokens != null) {
        clientOptions.modelKwargs = clientOptions.modelKwargs ?? {};
        const paramName =
          clientOptions.useResponsesApi === true ? 'max_output_tokens' : 'max_completion_tokens';
        clientOptions.modelKwargs[paramName] = clientOptions.maxTokens;
        delete clientOptions.maxTokens;
      }

      expect(clientOptions.maxTokens).toBeUndefined();
      expect(clientOptions.modelKwargs).toBeDefined();
      expect(clientOptions.modelKwargs.max_output_tokens).toBe(2048);
      expect(clientOptions.temperature).toBe(0.7); // Other options should remain
    });

    it('should handle GPT-5+ models with existing modelKwargs', () => {
      const clientOptions = {
        model: 'gpt-6',
        maxTokens: 1500,
        temperature: 0.8,
        modelKwargs: {
          customParam: 'value',
        },
      };

      // Simulate the getOptions logic
      if (/\bgpt-[5-9]\b/i.test(clientOptions.model) && clientOptions.maxTokens != null) {
        clientOptions.modelKwargs = clientOptions.modelKwargs ?? {};
        clientOptions.modelKwargs.max_completion_tokens = clientOptions.maxTokens;
        delete clientOptions.maxTokens;
      }

      expect(clientOptions.maxTokens).toBeUndefined();
      expect(clientOptions.modelKwargs).toEqual({
        customParam: 'value',
        max_completion_tokens: 1500,
      });
    });

    it('should not modify maxTokens for non-GPT-5+ models', () => {
      const clientOptions = {
        model: 'gpt-4',
        maxTokens: 2048,
        temperature: 0.7,
      };

      // Simulate the getOptions logic
      if (/\bgpt-[5-9]\b/i.test(clientOptions.model) && clientOptions.maxTokens != null) {
        clientOptions.modelKwargs = clientOptions.modelKwargs ?? {};
        clientOptions.modelKwargs.max_completion_tokens = clientOptions.maxTokens;
        delete clientOptions.maxTokens;
      }

      // Should not be modified since it's GPT-4
      expect(clientOptions.maxTokens).toBe(2048);
      expect(clientOptions.modelKwargs).toBeUndefined();
    });

    it('should handle various GPT-5+ model formats', () => {
      const testCases = [
        { model: 'gpt-5', shouldTransform: true },
        { model: 'gpt-5-turbo', shouldTransform: true },
        { model: 'gpt-6', shouldTransform: true },
        { model: 'gpt-7-preview', shouldTransform: true },
        { model: 'gpt-8', shouldTransform: true },
        { model: 'gpt-9-mini', shouldTransform: true },
        { model: 'gpt-4', shouldTransform: false },
        { model: 'gpt-4o', shouldTransform: false },
        { model: 'gpt-3.5-turbo', shouldTransform: false },
        { model: 'claude-3', shouldTransform: false },
      ];

      testCases.forEach(({ model, shouldTransform }) => {
        const clientOptions = {
          model,
          maxTokens: 1000,
        };

        // Simulate the getOptions logic
        if (/\bgpt-[5-9]\b/i.test(clientOptions.model) && clientOptions.maxTokens != null) {
          clientOptions.modelKwargs = clientOptions.modelKwargs ?? {};
          clientOptions.modelKwargs.max_completion_tokens = clientOptions.maxTokens;
          delete clientOptions.maxTokens;
        }

        if (shouldTransform) {
          expect(clientOptions.maxTokens).toBeUndefined();
          expect(clientOptions.modelKwargs?.max_completion_tokens).toBe(1000);
        } else {
          expect(clientOptions.maxTokens).toBe(1000);
          expect(clientOptions.modelKwargs).toBeUndefined();
        }
      });
    });

    it('should not swap max token param for older models when using useResponsesApi', () => {
      const testCases = [
        { model: 'gpt-5', shouldTransform: true },
        { model: 'gpt-5-turbo', shouldTransform: true },
        { model: 'gpt-6', shouldTransform: true },
        { model: 'gpt-7-preview', shouldTransform: true },
        { model: 'gpt-8', shouldTransform: true },
        { model: 'gpt-9-mini', shouldTransform: true },
        { model: 'gpt-4', shouldTransform: false },
        { model: 'gpt-4o', shouldTransform: false },
        { model: 'gpt-3.5-turbo', shouldTransform: false },
        { model: 'claude-3', shouldTransform: false },
      ];

      testCases.forEach(({ model, shouldTransform }) => {
        const clientOptions = {
          model,
          maxTokens: 1000,
          useResponsesApi: true,
        };

        if (/\bgpt-[5-9]\b/i.test(clientOptions.model) && clientOptions.maxTokens != null) {
          clientOptions.modelKwargs = clientOptions.modelKwargs ?? {};
          const paramName =
            clientOptions.useResponsesApi === true ? 'max_output_tokens' : 'max_completion_tokens';
          clientOptions.modelKwargs[paramName] = clientOptions.maxTokens;
          delete clientOptions.maxTokens;
        }

        if (shouldTransform) {
          expect(clientOptions.maxTokens).toBeUndefined();
          expect(clientOptions.modelKwargs?.max_output_tokens).toBe(1000);
        } else {
          expect(clientOptions.maxTokens).toBe(1000);
          expect(clientOptions.modelKwargs).toBeUndefined();
        }
      });
    });

    it('should not transform if maxTokens is null or undefined', () => {
      const testCases = [
        { model: 'gpt-5', maxTokens: null },
        { model: 'gpt-5', maxTokens: undefined },
        { model: 'gpt-6', maxTokens: 0 }, // Should transform even if 0
      ];

      testCases.forEach(({ model, maxTokens }, index) => {
        const clientOptions = {
          model,
          maxTokens,
          temperature: 0.7,
        };

        // Simulate the getOptions logic
        if (/\bgpt-[5-9]\b/i.test(clientOptions.model) && clientOptions.maxTokens != null) {
          clientOptions.modelKwargs = clientOptions.modelKwargs ?? {};
          clientOptions.modelKwargs.max_completion_tokens = clientOptions.maxTokens;
          delete clientOptions.maxTokens;
        }

        if (index < 2) {
          // null or undefined cases
          expect(clientOptions.maxTokens).toBe(maxTokens);
          expect(clientOptions.modelKwargs).toBeUndefined();
        } else {
          // 0 case - should transform
          expect(clientOptions.maxTokens).toBeUndefined();
          expect(clientOptions.modelKwargs?.max_completion_tokens).toBe(0);
        }
      });
    });
  });

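Each test above inlines the same "simulate the getOptions logic" snippet. The behavior under test can be summarized in one helper; the function name is hypothetical — the production code performs this transform inline inside getOptions rather than exposing such a function:

// Hypothetical consolidation of the transform the tests simulate.
function swapMaxTokensForGpt5Plus(clientOptions) {
  if (!/\bgpt-[5-9]\b/i.test(clientOptions.model) || clientOptions.maxTokens == null) {
    return clientOptions; // pre-GPT-5 models keep maxTokens as-is
  }
  clientOptions.modelKwargs = clientOptions.modelKwargs ?? {};
  const paramName =
    clientOptions.useResponsesApi === true ? 'max_output_tokens' : 'max_completion_tokens';
  clientOptions.modelKwargs[paramName] = clientOptions.maxTokens;
  delete clientOptions.maxTokens;
  return clientOptions;
}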
  describe('runMemory method', () => {
    let client;
    let mockReq;

@@ -9,6 +9,24 @@ const {
 const { disposeClient, clientRegistry, requestDataMap } = require('~/server/cleanup');
 const { saveMessage } = require('~/models');

+function createCloseHandler(abortController) {
+  return function (manual) {
+    if (!manual) {
+      logger.debug('[AgentController] Request closed');
+    }
+    if (!abortController) {
+      return;
+    } else if (abortController.signal.aborted) {
+      return;
+    } else if (abortController.requestCompleted) {
+      return;
+    }
+
+    abortController.abort();
+    logger.debug('[AgentController] Request aborted on close');
+  };
+}
+
 const AgentController = async (req, res, next, initializeClient, addTitle) => {
   let {
     text,
@@ -31,7 +49,6 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
   let userMessagePromise;
   let getAbortData;
   let client = null;
-  // Initialize as an array
   let cleanupHandlers = [];

   const newConvo = !conversationId;
@@ -62,9 +79,7 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
   // Create a function to handle final cleanup
   const performCleanup = () => {
     logger.debug('[AgentController] Performing cleanup');
-    // Make sure cleanupHandlers is an array before iterating
     if (Array.isArray(cleanupHandlers)) {
-      // Execute all cleanup handlers
       for (const handler of cleanupHandlers) {
         try {
           if (typeof handler === 'function') {
@@ -105,8 +120,33 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
   };

   try {
-    /** @type {{ client: TAgentClient }} */
-    const result = await initializeClient({ req, res, endpointOption });
+    let prelimAbortController = new AbortController();
+    const prelimCloseHandler = createCloseHandler(prelimAbortController);
+    res.on('close', prelimCloseHandler);
+    const removePrelimHandler = (manual) => {
+      try {
+        prelimCloseHandler(manual);
+        res.removeListener('close', prelimCloseHandler);
+      } catch (e) {
+        logger.error('[AgentController] Error removing close listener', e);
+      }
+    };
+    cleanupHandlers.push(removePrelimHandler);
+    /** @type {{ client: TAgentClient; userMCPAuthMap?: Record<string, Record<string, string>> }} */
+    const result = await initializeClient({
+      req,
+      res,
+      endpointOption,
+      signal: prelimAbortController.signal,
+    });
+    if (prelimAbortController.signal?.aborted) {
+      prelimAbortController = null;
+      throw new Error('Request was aborted before initialization could complete');
+    } else {
+      prelimAbortController = null;
+      removePrelimHandler(true);
+      cleanupHandlers.pop();
+    }
     client = result.client;

     // Register client with finalization registry if available
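The hunk above threads a preliminary AbortController through initializeClient so that a client disconnect during the potentially slow initialization aborts it instead of leaking it; once initialization succeeds, the temporary close listener is removed and the request's real abort controller (created below) takes over. A stripped-down sketch of that handoff, with hypothetical names:

// Minimal sketch of the prelim-abort handoff, assuming an Express-style `res` emitter.
async function initWithPrelimAbort(res, init) {
  const prelim = new AbortController();
  const onClose = () => prelim.abort();
  res.on('close', onClose);
  try {
    const result = await init(prelim.signal); // init is expected to observe the signal
    if (prelim.signal.aborted) {
      throw new Error('Request was aborted before initialization could complete');
    }
    return result;
  } finally {
    res.removeListener('close', onClose); // the real close handler is attached afterwards
  }
}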
@@ -138,22 +178,7 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
     };

     const { abortController, onStart } = createAbortController(req, res, getAbortData, getReqData);

-    // Simple handler to avoid capturing scope
-    const closeHandler = () => {
-      logger.debug('[AgentController] Request closed');
-      if (!abortController) {
-        return;
-      } else if (abortController.signal.aborted) {
-        return;
-      } else if (abortController.requestCompleted) {
-        return;
-      }
-
-      abortController.abort();
-      logger.debug('[AgentController] Request aborted on close');
-    };
-
+    const closeHandler = createCloseHandler(abortController);
     res.on('close', closeHandler);
     cleanupHandlers.push(() => {
       try {
@@ -175,6 +200,7 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
       abortController,
       overrideParentMessageId,
       isEdited: !!editedContent,
+      userMCPAuthMap: result.userMCPAuthMap,
       responseMessageId: editedResponseMessageId,
       progressOptions: {
         res,
@@ -233,6 +259,26 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
         );
       }
     }
+    // Edge case: sendMessage completed but abort happened during sendCompletion
+    // We need to ensure a final event is sent
+    else if (!res.headersSent && !res.finished) {
+      logger.debug(
+        '[AgentController] Handling edge case: `sendMessage` completed but aborted during `sendCompletion`',
+      );
+
+      const finalResponse = { ...response };
+      finalResponse.error = true;
+
+      sendEvent(res, {
+        final: true,
+        conversation,
+        title: conversation.title,
+        requestMessage: userMessage,
+        responseMessage: finalResponse,
+        error: { message: 'Request was aborted during completion' },
+      });
+      res.end();
+    }

     // Save user message if needed
     if (!client.skipSaveUserMessage) {

@@ -217,6 +217,9 @@ const updateAgentHandler = async (req, res) => {
       })
     : existingAgent;

+  // Add version count to the response
+  updatedAgent.version = updatedAgent.versions ? updatedAgent.versions.length : 0;
+
   if (updatedAgent.author) {
     updatedAgent.author = updatedAgent.author.toString();
   }

@@ -1,5 +1,6 @@
 const mongoose = require('mongoose');
 const { v4: uuidv4 } = require('uuid');
+const { nanoid } = require('nanoid');
 const { MongoMemoryServer } = require('mongodb-memory-server');
 const { agentSchema } = require('@librechat/data-schemas');

@@ -41,7 +42,27 @@ jest.mock('~/models/File', () => ({
   deleteFileByFilter: jest.fn(),
 }));

-const { createAgent: createAgentHandler, updateAgent: updateAgentHandler } = require('./v1');
+jest.mock('~/server/services/PermissionService', () => ({
+  findAccessibleResources: jest.fn().mockResolvedValue([]),
+  findPubliclyAccessibleResources: jest.fn().mockResolvedValue([]),
+  grantPermission: jest.fn(),
+  hasPublicPermission: jest.fn().mockResolvedValue(false),
+}));
+
+jest.mock('~/models', () => ({
+  getCategoriesWithCounts: jest.fn(),
+}));
+
+const {
+  createAgent: createAgentHandler,
+  updateAgent: updateAgentHandler,
+  getListAgents: getListAgentsHandler,
+} = require('./v1');
+
+const {
+  findAccessibleResources,
+  findPubliclyAccessibleResources,
+} = require('~/server/services/PermissionService');

/**
 * @type {import('mongoose').Model<import('@librechat/data-schemas').IAgent>}
@@ -79,6 +100,7 @@ describe('Agent Controllers - Mass Assignment Protection', () => {
       },
       body: {},
       params: {},
+      query: {},
       app: {
         locals: {
           fileStrategy: 'local',
@@ -527,6 +549,28 @@ describe('Agent Controllers - Mass Assignment Protection', () => {
     expect(mockRes.json).toHaveBeenCalledWith({ error: 'Agent not found' });
   });

+  test('should include version field in update response', async () => {
+    mockReq.user.id = existingAgentAuthorId.toString();
+    mockReq.params.id = existingAgentId;
+    mockReq.body = {
+      name: 'Updated with Version Check',
+    };
+
+    await updateAgentHandler(mockReq, mockRes);
+
+    expect(mockRes.json).toHaveBeenCalled();
+    const updatedAgent = mockRes.json.mock.calls[0][0];
+
+    // Verify version field is included and is a number
+    expect(updatedAgent).toHaveProperty('version');
+    expect(typeof updatedAgent.version).toBe('number');
+    expect(updatedAgent.version).toBeGreaterThanOrEqual(1);
+
+    // Verify in database
+    const agentInDb = await Agent.findOne({ id: existingAgentId });
+    expect(updatedAgent.version).toBe(agentInDb.versions.length);
+  });
+
   test('should handle validation errors properly', async () => {
     mockReq.user.id = existingAgentAuthorId.toString();
     mockReq.params.id = existingAgentId;
@@ -646,4 +690,373 @@ describe('Agent Controllers - Mass Assignment Protection', () => {
     expect(agentInDb.futureFeature).toBeUndefined();
   });
 });

  describe('getListAgentsHandler - Security Tests', () => {
    let userA, userB;
    let agentA1, agentA2, agentA3, agentB1;

    beforeEach(async () => {
      await Agent.deleteMany({});
      jest.clearAllMocks();

      // Create two test users
      userA = new mongoose.Types.ObjectId();
      userB = new mongoose.Types.ObjectId();

      // Create agents for User A
      agentA1 = await Agent.create({
        id: `agent_${nanoid(12)}`,
        name: 'Agent A1',
        description: 'User A agent 1',
        provider: 'openai',
        model: 'gpt-4',
        author: userA,
        versions: [
          {
            name: 'Agent A1',
            description: 'User A agent 1',
            provider: 'openai',
            model: 'gpt-4',
            createdAt: new Date(),
            updatedAt: new Date(),
          },
        ],
      });

      agentA2 = await Agent.create({
        id: `agent_${nanoid(12)}`,
        name: 'Agent A2',
        description: 'User A agent 2',
        provider: 'openai',
        model: 'gpt-4',
        author: userA,
        versions: [
          {
            name: 'Agent A2',
            description: 'User A agent 2',
            provider: 'openai',
            model: 'gpt-4',
            createdAt: new Date(),
            updatedAt: new Date(),
          },
        ],
      });

      agentA3 = await Agent.create({
        id: `agent_${nanoid(12)}`,
        name: 'Agent A3',
        description: 'User A agent 3',
        provider: 'openai',
        model: 'gpt-4',
        author: userA,
        category: 'productivity',
        versions: [
          {
            name: 'Agent A3',
            description: 'User A agent 3',
            provider: 'openai',
            model: 'gpt-4',
            category: 'productivity',
            createdAt: new Date(),
            updatedAt: new Date(),
          },
        ],
      });

      // Create an agent for User B
      agentB1 = await Agent.create({
        id: `agent_${nanoid(12)}`,
        name: 'Agent B1',
        description: 'User B agent 1',
        provider: 'openai',
        model: 'gpt-4',
        author: userB,
        versions: [
          {
            name: 'Agent B1',
            description: 'User B agent 1',
            provider: 'openai',
            model: 'gpt-4',
            createdAt: new Date(),
            updatedAt: new Date(),
          },
        ],
      });
    });

    test('should return empty list when user has no accessible agents', async () => {
      // User B has no permissions and no owned agents
      mockReq.user.id = userB.toString();
      findAccessibleResources.mockResolvedValue([]);
      findPubliclyAccessibleResources.mockResolvedValue([]);

      await getListAgentsHandler(mockReq, mockRes);

      expect(findAccessibleResources).toHaveBeenCalledWith({
        userId: userB.toString(),
        role: 'USER',
        resourceType: 'agent',
        requiredPermissions: 1, // VIEW permission
      });

      expect(mockRes.json).toHaveBeenCalledWith({
        object: 'list',
        data: [],
        first_id: null,
        last_id: null,
        has_more: false,
        after: null,
      });
    });

    test('should not return other users agents when accessibleIds is empty', async () => {
      // User B trying to see agents with no permissions
      mockReq.user.id = userB.toString();
      findAccessibleResources.mockResolvedValue([]);
      findPubliclyAccessibleResources.mockResolvedValue([]);

      await getListAgentsHandler(mockReq, mockRes);

      const response = mockRes.json.mock.calls[0][0];
      expect(response.data).toHaveLength(0);

      // Verify User A's agents are not included
      const agentIds = response.data.map((a) => a.id);
      expect(agentIds).not.toContain(agentA1.id);
      expect(agentIds).not.toContain(agentA2.id);
      expect(agentIds).not.toContain(agentA3.id);
    });

    test('should only return agents user has access to', async () => {
      // User B has access to one of User A's agents
      mockReq.user.id = userB.toString();
      findAccessibleResources.mockResolvedValue([agentA1._id]);
      findPubliclyAccessibleResources.mockResolvedValue([]);

      await getListAgentsHandler(mockReq, mockRes);

      const response = mockRes.json.mock.calls[0][0];
      expect(response.data).toHaveLength(1);
      expect(response.data[0].id).toBe(agentA1.id);
      expect(response.data[0].name).toBe('Agent A1');
    });

    test('should return multiple accessible agents', async () => {
      // User B has access to multiple agents
      mockReq.user.id = userB.toString();
      findAccessibleResources.mockResolvedValue([agentA1._id, agentA3._id, agentB1._id]);
      findPubliclyAccessibleResources.mockResolvedValue([]);

      await getListAgentsHandler(mockReq, mockRes);

      const response = mockRes.json.mock.calls[0][0];
      expect(response.data).toHaveLength(3);

      const agentIds = response.data.map((a) => a.id);
      expect(agentIds).toContain(agentA1.id);
      expect(agentIds).toContain(agentA3.id);
      expect(agentIds).toContain(agentB1.id);
      expect(agentIds).not.toContain(agentA2.id);
    });

    test('should apply category filter correctly with ACL', async () => {
      // User has access to all agents but filters by category
      mockReq.user.id = userB.toString();
      mockReq.query.category = 'productivity';
      findAccessibleResources.mockResolvedValue([agentA1._id, agentA2._id, agentA3._id]);
      findPubliclyAccessibleResources.mockResolvedValue([]);

      await getListAgentsHandler(mockReq, mockRes);

      const response = mockRes.json.mock.calls[0][0];
      expect(response.data).toHaveLength(1);
      expect(response.data[0].id).toBe(agentA3.id);
      expect(response.data[0].category).toBe('productivity');
    });

    test('should apply search filter correctly with ACL', async () => {
      // User has access to multiple agents but searches for specific one
      mockReq.user.id = userB.toString();
      mockReq.query.search = 'A2';
      findAccessibleResources.mockResolvedValue([agentA1._id, agentA2._id, agentA3._id]);
      findPubliclyAccessibleResources.mockResolvedValue([]);

      await getListAgentsHandler(mockReq, mockRes);

      const response = mockRes.json.mock.calls[0][0];
      expect(response.data).toHaveLength(1);
      expect(response.data[0].id).toBe(agentA2.id);
    });

    test('should handle pagination with ACL filtering', async () => {
      // Create more agents for pagination testing
      const moreAgents = [];
      for (let i = 4; i <= 10; i++) {
        const agent = await Agent.create({
          id: `agent_${nanoid(12)}`,
          name: `Agent A${i}`,
          description: `User A agent ${i}`,
          provider: 'openai',
          model: 'gpt-4',
          author: userA,
          versions: [
            {
              name: `Agent A${i}`,
              description: `User A agent ${i}`,
              provider: 'openai',
              model: 'gpt-4',
              createdAt: new Date(),
              updatedAt: new Date(),
            },
          ],
        });
        moreAgents.push(agent);
      }

      // User has access to all agents
      const allAgentIds = [agentA1, agentA2, agentA3, ...moreAgents].map((a) => a._id);
      mockReq.user.id = userB.toString();
      mockReq.query.limit = '5';
      findAccessibleResources.mockResolvedValue(allAgentIds);
      findPubliclyAccessibleResources.mockResolvedValue([]);

      await getListAgentsHandler(mockReq, mockRes);

      const response = mockRes.json.mock.calls[0][0];
      expect(response.data).toHaveLength(5);
      expect(response.has_more).toBe(true);
      expect(response.after).toBeTruthy();
    });

    test('should mark publicly accessible agents', async () => {
      // User has access to agents, some are public
      mockReq.user.id = userB.toString();
      findAccessibleResources.mockResolvedValue([agentA1._id, agentA2._id]);
      findPubliclyAccessibleResources.mockResolvedValue([agentA2._id]);

      await getListAgentsHandler(mockReq, mockRes);

      const response = mockRes.json.mock.calls[0][0];
      expect(response.data).toHaveLength(2);

      const publicAgent = response.data.find((a) => a.id === agentA2.id);
      const privateAgent = response.data.find((a) => a.id === agentA1.id);

      expect(publicAgent.isPublic).toBe(true);
      expect(privateAgent.isPublic).toBeUndefined();
    });

    test('should handle requiredPermission parameter', async () => {
      // Test with different permission levels
      mockReq.user.id = userB.toString();
      mockReq.query.requiredPermission = '15'; // FULL_ACCESS
      findAccessibleResources.mockResolvedValue([agentA1._id]);
      findPubliclyAccessibleResources.mockResolvedValue([]);

      await getListAgentsHandler(mockReq, mockRes);

      expect(findAccessibleResources).toHaveBeenCalledWith({
        userId: userB.toString(),
        role: 'USER',
        resourceType: 'agent',
        requiredPermissions: 15,
      });

      const response = mockRes.json.mock.calls[0][0];
      expect(response.data).toHaveLength(1);
    });

    test('should handle promoted filter with ACL', async () => {
      // Create a promoted agent
      const promotedAgent = await Agent.create({
        id: `agent_${nanoid(12)}`,
        name: 'Promoted Agent',
        description: 'A promoted agent',
        provider: 'openai',
        model: 'gpt-4',
        author: userA,
        is_promoted: true,
        versions: [
          {
            name: 'Promoted Agent',
            description: 'A promoted agent',
            provider: 'openai',
            model: 'gpt-4',
            is_promoted: true,
            createdAt: new Date(),
            updatedAt: new Date(),
          },
        ],
      });

      mockReq.user.id = userB.toString();
      mockReq.query.promoted = '1';
      findAccessibleResources.mockResolvedValue([agentA1._id, agentA2._id, promotedAgent._id]);
      findPubliclyAccessibleResources.mockResolvedValue([]);

      await getListAgentsHandler(mockReq, mockRes);

      const response = mockRes.json.mock.calls[0][0];
      expect(response.data).toHaveLength(1);
      expect(response.data[0].id).toBe(promotedAgent.id);
      expect(response.data[0].is_promoted).toBe(true);
    });

    test('should handle errors gracefully', async () => {
      mockReq.user.id = userB.toString();
      findAccessibleResources.mockRejectedValue(new Error('Permission service error'));

      await getListAgentsHandler(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(500);
      expect(mockRes.json).toHaveBeenCalledWith({
        error: 'Permission service error',
      });
    });

    test('should respect combined filters with ACL', async () => {
      // Create agents with specific attributes
      const productivityPromoted = await Agent.create({
        id: `agent_${nanoid(12)}`,
        name: 'Productivity Pro',
        description: 'A promoted productivity agent',
        provider: 'openai',
        model: 'gpt-4',
        author: userA,
        category: 'productivity',
        is_promoted: true,
        versions: [
          {
            name: 'Productivity Pro',
            description: 'A promoted productivity agent',
            provider: 'openai',
            model: 'gpt-4',
            category: 'productivity',
            is_promoted: true,
            createdAt: new Date(),
            updatedAt: new Date(),
          },
        ],
      });

      mockReq.user.id = userB.toString();
      mockReq.query.category = 'productivity';
      mockReq.query.promoted = '1';
      findAccessibleResources.mockResolvedValue([
        agentA1._id,
        agentA2._id,
        agentA3._id,
        productivityPromoted._id,
      ]);
      findPubliclyAccessibleResources.mockResolvedValue([]);

      await getListAgentsHandler(mockReq, mockRes);

      const response = mockRes.json.mock.calls[0][0];
      expect(response.data).toHaveLength(1);
      expect(response.data[0].id).toBe(productivityPromoted.id);
      expect(response.data[0].category).toBe('productivity');
      expect(response.data[0].is_promoted).toBe(true);
    });
  });
});

@@ -22,10 +22,11 @@ const verify2FAWithTempToken = async (req, res) => {
   try {
     payload = jwt.verify(tempToken, process.env.JWT_SECRET);
   } catch (err) {
     logger.error('Failed to verify temporary token:', err);
     return res.status(401).json({ message: 'Invalid or expired temporary token' });
   }

-  const user = await getUserById(payload.userId);
+  const user = await getUserById(payload.userId, '+totpSecret +backupCodes');
   if (!user || !user.twoFactorEnabled) {
     return res.status(400).json({ message: '2FA is not enabled for this user' });
   }
@@ -42,11 +43,11 @@ const verify2FAWithTempToken = async (req, res) => {
     return res.status(401).json({ message: 'Invalid 2FA code or backup code' });
   }

   // Prepare user data to return (omit sensitive fields).
   const userData = user.toObject ? user.toObject() : { ...user };
-  delete userData.password;
-  delete userData.__v;
+  delete userData.password;
+  delete userData.totpSecret;
+  delete userData.backupCodes;
   userData.id = user._id.toString();

   const authToken = await setAuthTokens(user._id, res);

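The second argument to getUserById above uses Mongoose projection syntax: a leading `+` re-includes a field the schema declares with `select: false`, which keeps secrets like TOTP material out of ordinary queries. An illustrative sketch — the field names are taken from this diff, but the schema shape is an assumption:

// Illustrative only; the actual user schema lives in @librechat/data-schemas.
const userSchema = new mongoose.Schema({
  email: String,
  totpSecret: { type: String, select: false }, // omitted from query results by default
  backupCodes: { type: Array, select: false },
});
const User = mongoose.model('User', userSchema);

// Inside an async function:
const safe = await User.findById(id); // safe.totpSecret === undefined
const full = await User.findById(id).select('+totpSecret +backupCodes'); // opt back in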
@@ -8,14 +8,13 @@ const express = require('express');
 const passport = require('passport');
 const compression = require('compression');
 const cookieParser = require('cookie-parser');
-const { isEnabled } = require('@librechat/api');
 const { logger } = require('@librechat/data-schemas');
 const mongoSanitize = require('express-mongo-sanitize');
+const { isEnabled, ErrorController } = require('@librechat/api');
 const { connectDb, indexSync } = require('~/db');

 const validateImageRequest = require('./middleware/validateImageRequest');
 const { jwtLogin, ldapLogin, passportLogin } = require('~/strategies');
-const errorController = require('./controllers/ErrorController');
 const { checkMigrations } = require('./services/start/migration');
 const initializeMCPs = require('./services/initializeMCPs');
 const configureSocialLogins = require('./socialLogins');
 const AppService = require('./services/AppService');
@@ -122,8 +121,7 @@ const startServer = async () => {
   app.use('/api/tags', routes.tags);
   app.use('/api/mcp', routes.mcp);

-  // Add the error controller one more time after all routes
-  app.use(errorController);
+  app.use(ErrorController);

   app.use((req, res) => {
     res.set({
@@ -148,7 +146,7 @@ const startServer = async () => {
      logger.info(`Server listening at http://${host == '0.0.0.0' ? 'localhost' : host}:${port}`);
    }

-    initializeMCPs(app);
+    initializeMCPs(app).then(() => checkMigrations());
  });
};


@@ -92,7 +92,7 @@ async function healthCheckPoll(app, retries = 0) {
     if (response.status === 200) {
       return; // App is healthy
     }
-  } catch (error) {
+  } catch {
     // Ignore connection errors during polling
   }


@@ -1,6 +1,6 @@
 const { logger } = require('@librechat/data-schemas');
 const { countTokens, isEnabled, sendEvent } = require('@librechat/api');
-const { isAssistantsEndpoint, ErrorTypes } = require('librechat-data-provider');
+const { isAssistantsEndpoint, ErrorTypes, Constants } = require('librechat-data-provider');
 const { truncateText, smartTruncateText } = require('~/app/clients/prompts');
 const clearPendingReq = require('~/cache/clearPendingReq');
 const { sendError } = require('~/server/middleware/error');
@@ -11,6 +11,10 @@ const { abortRun } = require('./abortRun');

 const abortDataMap = new WeakMap();

+/**
+ * @param {string} abortKey
+ * @returns {boolean}
+ */
 function cleanupAbortController(abortKey) {
   if (!abortControllers.has(abortKey)) {
     return false;
@@ -71,6 +75,20 @@ function cleanupAbortController(abortKey) {
   return true;
 }

+/**
+ * @param {string} abortKey
+ * @returns {function(): void}
+ */
+function createCleanUpHandler(abortKey) {
+  return function () {
+    try {
+      cleanupAbortController(abortKey);
+    } catch {
+      // Ignore cleanup errors
+    }
+  };
+}
+
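Per the comments being removed further down ("Use a simple function for cleanup to avoid capturing context"), the point of createCleanUpHandler is that the returned 'finish' listener closes over only the abort-key string rather than the whole request scope. A toy illustration of the intent — hypothetical wrapper names, not LibreChat code:

// Inline arrow: created in the request handler's scope, which can keep req/res
// and other locals reachable while the listener is registered.
function attachCleanupInline(res, abortKey) {
  res.on('finish', () => cleanupAbortController(abortKey));
}

// Factory variant: the listener is created in createCleanUpHandler's scope,
// whose only free variable is the abortKey string.
function attachCleanupLean(res, abortKey) {
  res.on('finish', createCleanUpHandler(abortKey));
}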
async function abortMessage(req, res) {
  let { abortKey, endpoint } = req.body;

@@ -172,11 +190,15 @@ const createAbortController = (req, res, getAbortData, getReqData) => {
   /**
    * @param {TMessage} userMessage
    * @param {string} responseMessageId
+   * @param {boolean} [isNewConvo]
    */
-  const onStart = (userMessage, responseMessageId) => {
+  const onStart = (userMessage, responseMessageId, isNewConvo) => {
     sendEvent(res, { message: userMessage, created: true });

-    const abortKey = userMessage?.conversationId ?? req.user.id;
+    const prelimAbortKey = userMessage?.conversationId ?? req.user.id;
+    const abortKey = isNewConvo
+      ? `${prelimAbortKey}${Constants.COMMON_DIVIDER}${Constants.NEW_CONVO}`
+      : prelimAbortKey;
     getReqData({ abortKey });
     const prevRequest = abortControllers.get(abortKey);
     const { overrideUserMessageId } = req?.body ?? {};
@@ -194,16 +216,7 @@ const createAbortController = (req, res, getAbortData, getReqData) => {
       };

       abortControllers.set(addedAbortKey, { abortController, ...minimalOptions });

-      // Use a simple function for cleanup to avoid capturing context
-      const cleanupHandler = () => {
-        try {
-          cleanupAbortController(addedAbortKey);
-        } catch (e) {
-          // Ignore cleanup errors
-        }
-      };
-
+      const cleanupHandler = createCleanUpHandler(addedAbortKey);
       res.on('finish', cleanupHandler);
       return;
     }
@@ -216,16 +229,7 @@ const createAbortController = (req, res, getAbortData, getReqData) => {
     };

     abortControllers.set(abortKey, { abortController, ...minimalOptions });

-    // Use a simple function for cleanup to avoid capturing context
-    const cleanupHandler = () => {
-      try {
-        cleanupAbortController(abortKey);
-      } catch (e) {
-        // Ignore cleanup errors
-      }
-    };
-
+    const cleanupHandler = createCleanUpHandler(abortKey);
     res.on('finish', cleanupHandler);
   };

@@ -364,15 +368,7 @@ const handleAbortError = async (res, req, error, data) => {
     };
   }

-  // Create a simple callback without capturing parent scope
-  const callback = async () => {
-    try {
-      cleanupAbortController(conversationId);
-    } catch (e) {
-      // Ignore cleanup errors
-    }
-  };
-
+  const callback = createCleanUpHandler(conversationId);
   await sendError(req, res, options, callback);
};


@@ -7,7 +7,8 @@ const { logger } = require('~/config');
  * Checks specific permission based on the 'type' query parameter:
  * - type=user: requires VIEW_USERS permission
  * - type=group: requires VIEW_GROUPS permission
- * - no type (mixed search): requires either VIEW_USERS OR VIEW_GROUPS
+ * - type=role: requires VIEW_ROLES permission
+ * - no type (mixed search): requires either VIEW_USERS OR VIEW_GROUPS OR VIEW_ROLES
  */
 const checkPeoplePickerAccess = async (req, res, next) => {
   try {
@@ -31,29 +32,38 @@ const checkPeoplePickerAccess = async (req, res, next) => {
     const peoplePickerPerms = role.permissions[PermissionTypes.PEOPLE_PICKER] || {};
     const canViewUsers = peoplePickerPerms[Permissions.VIEW_USERS] === true;
     const canViewGroups = peoplePickerPerms[Permissions.VIEW_GROUPS] === true;
+    const canViewRoles = peoplePickerPerms[Permissions.VIEW_ROLES] === true;

-    if (type === PrincipalType.USER) {
-      if (!canViewUsers) {
-        return res.status(403).json({
-          error: 'Forbidden',
-          message: 'Insufficient permissions to search for users',
-        });
-      }
-    } else if (type === PrincipalType.GROUP) {
-      if (!canViewGroups) {
-        return res.status(403).json({
-          error: 'Forbidden',
-          message: 'Insufficient permissions to search for groups',
-        });
-      }
-    } else {
-      if (!canViewUsers || !canViewGroups) {
-        return res.status(403).json({
-          error: 'Forbidden',
-          message: 'Insufficient permissions to search for both users and groups',
-        });
-      }
-    }
+    const permissionChecks = {
+      [PrincipalType.USER]: {
+        hasPermission: canViewUsers,
+        message: 'Insufficient permissions to search for users',
+      },
+      [PrincipalType.GROUP]: {
+        hasPermission: canViewGroups,
+        message: 'Insufficient permissions to search for groups',
+      },
+      [PrincipalType.ROLE]: {
+        hasPermission: canViewRoles,
+        message: 'Insufficient permissions to search for roles',
+      },
+    };
+
+    const check = permissionChecks[type];
+    if (check && !check.hasPermission) {
+      return res.status(403).json({
+        error: 'Forbidden',
+        message: check.message,
+      });
+    }
+
+    if (!type && !canViewUsers && !canViewGroups && !canViewRoles) {
+      return res.status(403).json({
+        error: 'Forbidden',
+        message: 'Insufficient permissions to search for users, groups, or roles',
+      });
+    }

     next();
   } catch (error) {
     logger.error(

250
api/server/middleware/checkPeoplePickerAccess.spec.js
Normal file
@@ -0,0 +1,250 @@
const { PrincipalType, PermissionTypes, Permissions } = require('librechat-data-provider');
const { checkPeoplePickerAccess } = require('./checkPeoplePickerAccess');
const { getRoleByName } = require('~/models/Role');
const { logger } = require('~/config');

jest.mock('~/models/Role');
jest.mock('~/config', () => ({
  logger: {
    error: jest.fn(),
  },
}));

describe('checkPeoplePickerAccess', () => {
  let req, res, next;

  beforeEach(() => {
    req = {
      user: { id: 'user123', role: 'USER' },
      query: {},
    };
    res = {
      status: jest.fn().mockReturnThis(),
      json: jest.fn(),
    };
    next = jest.fn();
    jest.clearAllMocks();
  });

  it('should return 401 if user is not authenticated', async () => {
    req.user = null;

    await checkPeoplePickerAccess(req, res, next);

    expect(res.status).toHaveBeenCalledWith(401);
    expect(res.json).toHaveBeenCalledWith({
      error: 'Unauthorized',
      message: 'Authentication required',
    });
    expect(next).not.toHaveBeenCalled();
  });

  it('should return 403 if role has no permissions', async () => {
    getRoleByName.mockResolvedValue(null);

    await checkPeoplePickerAccess(req, res, next);

    expect(res.status).toHaveBeenCalledWith(403);
    expect(res.json).toHaveBeenCalledWith({
      error: 'Forbidden',
      message: 'No permissions configured for user role',
    });
    expect(next).not.toHaveBeenCalled();
  });

  it('should allow access when searching for users with VIEW_USERS permission', async () => {
    req.query.type = PrincipalType.USER;
    getRoleByName.mockResolvedValue({
      permissions: {
        [PermissionTypes.PEOPLE_PICKER]: {
          [Permissions.VIEW_USERS]: true,
          [Permissions.VIEW_GROUPS]: false,
          [Permissions.VIEW_ROLES]: false,
        },
      },
    });

    await checkPeoplePickerAccess(req, res, next);

    expect(next).toHaveBeenCalled();
    expect(res.status).not.toHaveBeenCalled();
  });

  it('should deny access when searching for users without VIEW_USERS permission', async () => {
    req.query.type = PrincipalType.USER;
    getRoleByName.mockResolvedValue({
      permissions: {
        [PermissionTypes.PEOPLE_PICKER]: {
          [Permissions.VIEW_USERS]: false,
          [Permissions.VIEW_GROUPS]: true,
          [Permissions.VIEW_ROLES]: true,
        },
      },
    });

    await checkPeoplePickerAccess(req, res, next);

    expect(res.status).toHaveBeenCalledWith(403);
    expect(res.json).toHaveBeenCalledWith({
      error: 'Forbidden',
      message: 'Insufficient permissions to search for users',
    });
    expect(next).not.toHaveBeenCalled();
  });

  it('should allow access when searching for groups with VIEW_GROUPS permission', async () => {
    req.query.type = PrincipalType.GROUP;
    getRoleByName.mockResolvedValue({
      permissions: {
        [PermissionTypes.PEOPLE_PICKER]: {
          [Permissions.VIEW_USERS]: false,
          [Permissions.VIEW_GROUPS]: true,
          [Permissions.VIEW_ROLES]: false,
        },
      },
    });

    await checkPeoplePickerAccess(req, res, next);

    expect(next).toHaveBeenCalled();
    expect(res.status).not.toHaveBeenCalled();
  });

  it('should deny access when searching for groups without VIEW_GROUPS permission', async () => {
    req.query.type = PrincipalType.GROUP;
    getRoleByName.mockResolvedValue({
      permissions: {
        [PermissionTypes.PEOPLE_PICKER]: {
          [Permissions.VIEW_USERS]: true,
          [Permissions.VIEW_GROUPS]: false,
          [Permissions.VIEW_ROLES]: true,
        },
      },
    });

    await checkPeoplePickerAccess(req, res, next);

    expect(res.status).toHaveBeenCalledWith(403);
    expect(res.json).toHaveBeenCalledWith({
      error: 'Forbidden',
      message: 'Insufficient permissions to search for groups',
    });
    expect(next).not.toHaveBeenCalled();
  });

  it('should allow access when searching for roles with VIEW_ROLES permission', async () => {
    req.query.type = PrincipalType.ROLE;
    getRoleByName.mockResolvedValue({
      permissions: {
        [PermissionTypes.PEOPLE_PICKER]: {
          [Permissions.VIEW_USERS]: false,
          [Permissions.VIEW_GROUPS]: false,
          [Permissions.VIEW_ROLES]: true,
        },
      },
    });

    await checkPeoplePickerAccess(req, res, next);

    expect(next).toHaveBeenCalled();
    expect(res.status).not.toHaveBeenCalled();
  });

  it('should deny access when searching for roles without VIEW_ROLES permission', async () => {
    req.query.type = PrincipalType.ROLE;
    getRoleByName.mockResolvedValue({
      permissions: {
        [PermissionTypes.PEOPLE_PICKER]: {
          [Permissions.VIEW_USERS]: true,
          [Permissions.VIEW_GROUPS]: true,
          [Permissions.VIEW_ROLES]: false,
        },
      },
    });

    await checkPeoplePickerAccess(req, res, next);

    expect(res.status).toHaveBeenCalledWith(403);
    expect(res.json).toHaveBeenCalledWith({
      error: 'Forbidden',
      message: 'Insufficient permissions to search for roles',
    });
    expect(next).not.toHaveBeenCalled();
  });

  it('should allow mixed search when user has at least one permission', async () => {
    // No type specified = mixed search
    req.query.type = undefined;
    getRoleByName.mockResolvedValue({
      permissions: {
        [PermissionTypes.PEOPLE_PICKER]: {
          [Permissions.VIEW_USERS]: false,
          [Permissions.VIEW_GROUPS]: false,
          [Permissions.VIEW_ROLES]: true,
        },
      },
    });

    await checkPeoplePickerAccess(req, res, next);

    expect(next).toHaveBeenCalled();
    expect(res.status).not.toHaveBeenCalled();
  });

  it('should deny mixed search when user has no permissions', async () => {
    // No type specified = mixed search
    req.query.type = undefined;
    getRoleByName.mockResolvedValue({
      permissions: {
        [PermissionTypes.PEOPLE_PICKER]: {
          [Permissions.VIEW_USERS]: false,
          [Permissions.VIEW_GROUPS]: false,
          [Permissions.VIEW_ROLES]: false,
        },
      },
    });

    await checkPeoplePickerAccess(req, res, next);

    expect(res.status).toHaveBeenCalledWith(403);
    expect(res.json).toHaveBeenCalledWith({
      error: 'Forbidden',
      message: 'Insufficient permissions to search for users, groups, or roles',
    });
    expect(next).not.toHaveBeenCalled();
  });

  it('should handle errors gracefully', async () => {
    const error = new Error('Database error');
    getRoleByName.mockRejectedValue(error);

    await checkPeoplePickerAccess(req, res, next);

    expect(logger.error).toHaveBeenCalledWith(
      '[checkPeoplePickerAccess][user123] checkPeoplePickerAccess error for req.query.type = undefined',
      error,
    );
    expect(res.status).toHaveBeenCalledWith(500);
    expect(res.json).toHaveBeenCalledWith({
      error: 'Internal Server Error',
      message: 'Failed to check permissions',
    });
    expect(next).not.toHaveBeenCalled();
  });

  it('should handle missing permissions object gracefully', async () => {
    req.query.type = PrincipalType.USER;
    getRoleByName.mockResolvedValue({
      permissions: {}, // No PEOPLE_PICKER permissions
    });

    await checkPeoplePickerAccess(req, res, next);

    expect(res.status).toHaveBeenCalledWith(403);
    expect(res.json).toHaveBeenCalledWith({
      error: 'Forbidden',
      message: 'Insufficient permissions to search for users',
    });
    expect(next).not.toHaveBeenCalled();
  });
});
@@ -9,7 +9,6 @@ const validateEndpoint = require('./validateEndpoint');
 const requireLocalAuth = require('./requireLocalAuth');
 const canDeleteAccount = require('./canDeleteAccount');
 const accessResources = require('./accessResources');
-const setBalanceConfig = require('./setBalanceConfig');
 const requireLdapAuth = require('./requireLdapAuth');
 const abortMiddleware = require('./abortMiddleware');
 const checkInviteUser = require('./checkInviteUser');
@@ -44,7 +43,6 @@ module.exports = {
   requireLocalAuth,
   canDeleteAccount,
   validateEndpoint,
-  setBalanceConfig,
   concurrentLimiter,
   checkDomainAllowed,
   validateMessageReq,

@@ -1,91 +0,0 @@
const { logger } = require('@librechat/data-schemas');
const { getBalanceConfig } = require('~/server/services/Config');
const { Balance } = require('~/db/models');

/**
 * Middleware to synchronize user balance settings with current balance configuration.
 * @function
 * @param {Object} req - Express request object containing user information.
 * @param {Object} res - Express response object.
 * @param {import('express').NextFunction} next - Next middleware function.
 */
const setBalanceConfig = async (req, res, next) => {
  try {
    const balanceConfig = await getBalanceConfig();
    if (!balanceConfig?.enabled) {
      return next();
    }
    if (balanceConfig.startBalance == null) {
      return next();
    }

    const userId = req.user._id;
    const userBalanceRecord = await Balance.findOne({ user: userId }).lean();
    const updateFields = buildUpdateFields(balanceConfig, userBalanceRecord);

    if (Object.keys(updateFields).length === 0) {
      return next();
    }

    await Balance.findOneAndUpdate(
      { user: userId },
      { $set: updateFields },
      { upsert: true, new: true },
    );

    next();
  } catch (error) {
    logger.error('Error setting user balance:', error);
    next(error);
  }
};

/**
 * Build an object containing fields that need updating
 * @param {Object} config - The balance configuration
 * @param {Object|null} userRecord - The user's current balance record, if any
 * @returns {Object} Fields that need updating
 */
function buildUpdateFields(config, userRecord) {
  const updateFields = {};

  // Ensure user record has the required fields
  if (!userRecord) {
    updateFields.user = userRecord?.user;
    updateFields.tokenCredits = config.startBalance;
  }

  if (userRecord?.tokenCredits == null && config.startBalance != null) {
    updateFields.tokenCredits = config.startBalance;
  }

  const isAutoRefillConfigValid =
    config.autoRefillEnabled &&
    config.refillIntervalValue != null &&
    config.refillIntervalUnit != null &&
    config.refillAmount != null;

  if (!isAutoRefillConfigValid) {
    return updateFields;
  }

  if (userRecord?.autoRefillEnabled !== config.autoRefillEnabled) {
    updateFields.autoRefillEnabled = config.autoRefillEnabled;
  }

  if (userRecord?.refillIntervalValue !== config.refillIntervalValue) {
    updateFields.refillIntervalValue = config.refillIntervalValue;
  }

  if (userRecord?.refillIntervalUnit !== config.refillIntervalUnit) {
    updateFields.refillIntervalUnit = config.refillIntervalUnit;
  }

  if (userRecord?.refillAmount !== config.refillAmount) {
    updateFields.refillAmount = config.refillAmount;
  }

  return updateFields;
}

module.exports = setBalanceConfig;
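The file above is removed outright; the same behavior now comes from the `createSetBalanceConfig` factory in `@librechat/api`, as the auth-route hunks later in this compare show. A minimal sketch of the new wiring (the factory's internals are assumed to mirror the deleted middleware):

```js
// Replacement wiring, as seen later in this compare. Assumption: the factory
// closes over the injected dependencies and returns an Express middleware
// equivalent to the deleted file above.
const { createSetBalanceConfig } = require('@librechat/api');
const { getBalanceConfig } = require('~/server/services/Config');
const { Balance } = require('~/db/models');

const setBalanceConfig = createSetBalanceConfig({
  getBalanceConfig, // reads the current balance configuration
  Balance, // Mongoose model used to upsert per-user balance records
});

// Used exactly like the old middleware, e.g.:
// router.post('/login', ..., setBalanceConfig, loginController);
```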
@@ -33,7 +33,7 @@ const validateModel = async (req, res, next) => {
    return next();
  }

  const { ILLEGAL_MODEL_REQ_SCORE: score = 5 } = process.env ?? {};
  const { ILLEGAL_MODEL_REQ_SCORE: score = 1 } = process.env ?? {};

  const type = ViolationTypes.ILLEGAL_MODEL_REQUEST;
  const errorMessage = {

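The default violation score for illegal model requests drops from 5 to 1 when `ILLEGAL_MODEL_REQ_SCORE` is unset. Worth noting that the fallback only applies to an undefined variable, and env values arrive as strings; a quick sketch of the semantics:

```js
// Destructuring with a default: the fallback (now 1, previously 5) applies
// only when the variable is undefined; values set in .env come in as strings.
const { ILLEGAL_MODEL_REQ_SCORE: score = 1 } = process.env ?? {};
// ILLEGAL_MODEL_REQ_SCORE unset     -> score === 1 (number)
// ILLEGAL_MODEL_REQ_SCORE=3 in .env -> score === '3' (string)
```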
@@ -4,12 +4,14 @@ const mongoose = require('mongoose');
const { MongoMemoryServer } = require('mongodb-memory-server');

jest.mock('@librechat/api', () => ({
  ...jest.requireActual('@librechat/api'),
  MCPOAuthHandler: {
    initiateOAuthFlow: jest.fn(),
    getFlowState: jest.fn(),
    completeOAuthFlow: jest.fn(),
    generateFlowId: jest.fn(),
  },
  getUserMCPAuthMap: jest.fn(),
}));

jest.mock('@librechat/data-schemas', () => ({
@@ -36,6 +38,7 @@ jest.mock('~/models', () => ({
  updateToken: jest.fn(),
  createToken: jest.fn(),
  deleteTokens: jest.fn(),
  findPluginAuthsByKeys: jest.fn(),
}));

jest.mock('~/server/services/Config', () => ({
@@ -44,6 +47,10 @@ jest.mock('~/server/services/Config', () => ({
  loadCustomConfig: jest.fn(),
}));

jest.mock('~/server/services/Config/mcpToolsCache', () => ({
  updateMCPUserTools: jest.fn(),
}));

jest.mock('~/server/services/MCP', () => ({
  getMCPSetupData: jest.fn(),
  getServerConnectionStatus: jest.fn(),
@@ -66,6 +73,10 @@ jest.mock('~/server/middleware', () => ({
  requireJwtAuth: (req, res, next) => next(),
}));

jest.mock('~/server/services/Tools/mcp', () => ({
  reinitMCPServer: jest.fn(),
}));

describe('MCP Routes', () => {
  let app;
  let mongoServer;
@@ -494,12 +505,9 @@ describe('MCP Routes', () => {
    });

    it('should return 500 when token retrieval throws an unexpected error', async () => {
      const mockFlowManager = {
        getFlowState: jest.fn().mockRejectedValue(new Error('Database connection failed')),
      };

      getLogStores.mockReturnValue({});
      require('~/config').getFlowStateManager.mockReturnValue(mockFlowManager);
      getLogStores.mockImplementation(() => {
        throw new Error('Database connection failed');
      });

      const response = await request(app).get('/api/mcp/oauth/tokens/test-user-id:error-flow');

@@ -563,8 +571,8 @@ describe('MCP Routes', () => {
  });

  describe('POST /oauth/cancel/:serverName', () => {
    const { getLogStores } = require('~/cache');
    const { MCPOAuthHandler } = require('@librechat/api');
    const { getLogStores } = require('~/cache');

    it('should cancel OAuth flow successfully', async () => {
      const mockFlowManager = {
@@ -644,15 +652,15 @@ describe('MCP Routes', () => {
  });

  describe('POST /:serverName/reinitialize', () => {
    const { loadCustomConfig } = require('~/server/services/Config');
    const { getUserPluginAuthValue } = require('~/server/services/PluginService');

    it('should return 404 when server is not found in configuration', async () => {
      loadCustomConfig.mockResolvedValue({
        mcpServers: {
          'other-server': {},
        },
      });
      const mockMcpManager = {
        getRawConfig: jest.fn().mockReturnValue(null),
        disconnectUserConnection: jest.fn().mockResolvedValue(),
      };

      require('~/config').getMCPManager.mockReturnValue(mockMcpManager);
      require('~/config').getFlowStateManager.mockReturnValue({});
      require('~/cache').getLogStores.mockReturnValue({});

      const response = await request(app).post('/api/mcp/non-existent-server/reinitialize');

@@ -663,16 +671,11 @@ describe('MCP Routes', () => {
    });

    it('should handle OAuth requirement during reinitialize', async () => {
      loadCustomConfig.mockResolvedValue({
        mcpServers: {
          'oauth-server': {
            customUserVars: {},
          },
        },
      });

      const mockMcpManager = {
        disconnectServer: jest.fn().mockResolvedValue(),
        getRawConfig: jest.fn().mockReturnValue({
          customUserVars: {},
        }),
        disconnectUserConnection: jest.fn().mockResolvedValue(),
        mcpConfigs: {},
        getUserConnection: jest.fn().mockImplementation(async ({ oauthStart }) => {
          if (oauthStart) {
@@ -685,12 +688,19 @@ describe('MCP Routes', () => {
      require('~/config').getMCPManager.mockReturnValue(mockMcpManager);
      require('~/config').getFlowStateManager.mockReturnValue({});
      require('~/cache').getLogStores.mockReturnValue({});
      require('~/server/services/Tools/mcp').reinitMCPServer.mockResolvedValue({
        success: true,
        message: "MCP server 'oauth-server' ready for OAuth authentication",
        serverName: 'oauth-server',
        oauthRequired: true,
        oauthUrl: 'https://oauth.example.com/auth',
      });

      const response = await request(app).post('/api/mcp/oauth-server/reinitialize');

      expect(response.status).toBe(200);
      expect(response.body).toEqual({
        success: 'https://oauth.example.com/auth',
        success: true,
        message: "MCP server 'oauth-server' ready for OAuth authentication",
        serverName: 'oauth-server',
        oauthRequired: true,
@@ -699,14 +709,9 @@ describe('MCP Routes', () => {
    });

    it('should return 500 when reinitialize fails with non-OAuth error', async () => {
      loadCustomConfig.mockResolvedValue({
        mcpServers: {
          'error-server': {},
        },
      });

      const mockMcpManager = {
        disconnectServer: jest.fn().mockResolvedValue(),
        getRawConfig: jest.fn().mockReturnValue({}),
        disconnectUserConnection: jest.fn().mockResolvedValue(),
        mcpConfigs: {},
        getUserConnection: jest.fn().mockRejectedValue(new Error('Connection failed')),
      };
@@ -714,6 +719,7 @@ describe('MCP Routes', () => {
      require('~/config').getMCPManager.mockReturnValue(mockMcpManager);
      require('~/config').getFlowStateManager.mockReturnValue({});
      require('~/cache').getLogStores.mockReturnValue({});
      require('~/server/services/Tools/mcp').reinitMCPServer.mockResolvedValue(null);

      const response = await request(app).post('/api/mcp/error-server/reinitialize');

@@ -724,7 +730,13 @@ describe('MCP Routes', () => {
    });

    it('should return 500 when unexpected error occurs', async () => {
      loadCustomConfig.mockRejectedValue(new Error('Config loading failed'));
      const mockMcpManager = {
        getRawConfig: jest.fn().mockImplementation(() => {
          throw new Error('Config loading failed');
        }),
      };

      require('~/config').getMCPManager.mockReturnValue(mockMcpManager);

      const response = await request(app).post('/api/mcp/test-server/reinitialize');

@@ -747,29 +759,17 @@ describe('MCP Routes', () => {
      expect(response.body).toEqual({ error: 'User not authenticated' });
    });

    it('should handle errors when fetching custom user variables', async () => {
      loadCustomConfig.mockResolvedValue({
        mcpServers: {
          'test-server': {
            customUserVars: {
              API_KEY: 'test-key-var',
              SECRET_TOKEN: 'test-secret-var',
            },
          },
        },
      });

      getUserPluginAuthValue
        .mockResolvedValueOnce('test-api-key-value')
        .mockRejectedValueOnce(new Error('Database error'));

    it('should successfully reinitialize server and cache tools', async () => {
      const mockUserConnection = {
        fetchTools: jest.fn().mockResolvedValue([]),
        fetchTools: jest.fn().mockResolvedValue([
          { name: 'tool1', description: 'Test tool 1', inputSchema: { type: 'object' } },
          { name: 'tool2', description: 'Test tool 2', inputSchema: { type: 'object' } },
        ]),
      };

      const mockMcpManager = {
        disconnectServer: jest.fn().mockResolvedValue(),
        mcpConfigs: {},
        getRawConfig: jest.fn().mockReturnValue({ endpoint: 'http://test-server.com' }),
        disconnectUserConnection: jest.fn().mockResolvedValue(),
        getUserConnection: jest.fn().mockResolvedValue(mockUserConnection),
      };

@@ -778,44 +778,86 @@ describe('MCP Routes', () => {
      require('~/cache').getLogStores.mockReturnValue({});

      const { getCachedTools, setCachedTools } = require('~/server/services/Config');
      const { updateMCPUserTools } = require('~/server/services/Config/mcpToolsCache');
      getCachedTools.mockResolvedValue({});
      setCachedTools.mockResolvedValue();
      updateMCPUserTools.mockResolvedValue();

      require('~/server/services/Tools/mcp').reinitMCPServer.mockResolvedValue({
        success: true,
        message: "MCP server 'test-server' reinitialized successfully",
        serverName: 'test-server',
        oauthRequired: false,
        oauthUrl: null,
      });

      const response = await request(app).post('/api/mcp/test-server/reinitialize');

      expect(response.status).toBe(200);
      expect(response.body.success).toBe(true);
      expect(response.body).toEqual({
        success: true,
        message: "MCP server 'test-server' reinitialized successfully",
        serverName: 'test-server',
        oauthRequired: false,
        oauthUrl: null,
      });
      expect(mockMcpManager.disconnectUserConnection).toHaveBeenCalledWith(
        'test-user-id',
        'test-server',
      );
    });

    it('should return failure message when reinitialize completely fails', async () => {
      loadCustomConfig.mockResolvedValue({
        mcpServers: {
          'test-server': {},
        },
      });
    it('should handle server with custom user variables', async () => {
      const mockUserConnection = {
        fetchTools: jest.fn().mockResolvedValue([]),
      };

      const mockMcpManager = {
        disconnectServer: jest.fn().mockResolvedValue(),
        mcpConfigs: {},
        getUserConnection: jest.fn().mockResolvedValue(null),
        getRawConfig: jest.fn().mockReturnValue({
          endpoint: 'http://test-server.com',
          customUserVars: {
            API_KEY: 'some-env-var',
          },
        }),
        disconnectUserConnection: jest.fn().mockResolvedValue(),
        getUserConnection: jest.fn().mockResolvedValue(mockUserConnection),
      };

      require('~/config').getMCPManager.mockReturnValue(mockMcpManager);
      require('~/config').getFlowStateManager.mockReturnValue({});
      require('~/cache').getLogStores.mockReturnValue({});
      require('@librechat/api').getUserMCPAuthMap.mockResolvedValue({
        'mcp:test-server': {
          API_KEY: 'api-key-value',
        },
      });
      require('~/models').findPluginAuthsByKeys.mockResolvedValue([
        { key: 'API_KEY', value: 'api-key-value' },
      ]);

      const { getCachedTools, setCachedTools } = require('~/server/services/Config');
      const { Constants } = require('librechat-data-provider');
      getCachedTools.mockResolvedValue({
        [`existing-tool${Constants.mcp_delimiter}test-server`]: { type: 'function' },
      });
      const { updateMCPUserTools } = require('~/server/services/Config/mcpToolsCache');
      getCachedTools.mockResolvedValue({});
      setCachedTools.mockResolvedValue();
      updateMCPUserTools.mockResolvedValue();

      require('~/server/services/Tools/mcp').reinitMCPServer.mockResolvedValue({
        success: true,
        message: "MCP server 'test-server' reinitialized successfully",
        serverName: 'test-server',
        oauthRequired: false,
        oauthUrl: null,
      });

      const response = await request(app).post('/api/mcp/test-server/reinitialize');

      expect(response.status).toBe(200);
      expect(response.body.success).toBe(false);
      expect(response.body.message).toBe("Failed to reinitialize MCP server 'test-server'");
      expect(response.body.success).toBe(true);
      expect(require('@librechat/api').getUserMCPAuthMap).toHaveBeenCalledWith({
        userId: 'test-user-id',
        servers: ['test-server'],
        findPluginAuthsByKeys: require('~/models').findPluginAuthsByKeys,
      });
    });
  });

@@ -984,21 +1026,19 @@ describe('MCP Routes', () => {
  });

  describe('GET /:serverName/auth-values', () => {
    const { loadCustomConfig } = require('~/server/services/Config');
    const { getUserPluginAuthValue } = require('~/server/services/PluginService');

    it('should return auth value flags for server', async () => {
      loadCustomConfig.mockResolvedValue({
        mcpServers: {
          'test-server': {
            customUserVars: {
              API_KEY: 'some-env-var',
              SECRET_TOKEN: 'another-env-var',
            },
      const mockMcpManager = {
        getRawConfig: jest.fn().mockReturnValue({
          customUserVars: {
            API_KEY: 'some-env-var',
            SECRET_TOKEN: 'another-env-var',
          },
        },
      });
        }),
      };

      require('~/config').getMCPManager.mockReturnValue(mockMcpManager);
      getUserPluginAuthValue.mockResolvedValueOnce('some-api-key-value').mockResolvedValueOnce('');

      const response = await request(app).get('/api/mcp/test-server/auth-values');
@@ -1017,11 +1057,11 @@ describe('MCP Routes', () => {
    });

    it('should return 404 when server is not found in configuration', async () => {
      loadCustomConfig.mockResolvedValue({
        mcpServers: {
          'other-server': {},
        },
      });
      const mockMcpManager = {
        getRawConfig: jest.fn().mockReturnValue(null),
      };

      require('~/config').getMCPManager.mockReturnValue(mockMcpManager);

      const response = await request(app).get('/api/mcp/non-existent-server/auth-values');

@@ -1032,16 +1072,15 @@ describe('MCP Routes', () => {
    });

    it('should handle errors when checking auth values', async () => {
      loadCustomConfig.mockResolvedValue({
        mcpServers: {
          'test-server': {
            customUserVars: {
              API_KEY: 'some-env-var',
            },
      const mockMcpManager = {
        getRawConfig: jest.fn().mockReturnValue({
          customUserVars: {
            API_KEY: 'some-env-var',
          },
        },
      });
        }),
      };

      require('~/config').getMCPManager.mockReturnValue(mockMcpManager);
      getUserPluginAuthValue.mockRejectedValue(new Error('Database error'));

      const response = await request(app).get('/api/mcp/test-server/auth-values');
@@ -1057,7 +1096,13 @@ describe('MCP Routes', () => {
    });

    it('should return 500 when auth values check throws unexpected error', async () => {
      loadCustomConfig.mockRejectedValue(new Error('Config loading failed'));
      const mockMcpManager = {
        getRawConfig: jest.fn().mockImplementation(() => {
          throw new Error('Config loading failed');
        }),
      };

      require('~/config').getMCPManager.mockReturnValue(mockMcpManager);

      const response = await request(app).get('/api/mcp/test-server/auth-values');

@@ -1066,14 +1111,13 @@ describe('MCP Routes', () => {
    });

    it('should handle customUserVars that is not an object', async () => {
      const { loadCustomConfig } = require('~/server/services/Config');
      loadCustomConfig.mockResolvedValue({
        mcpServers: {
          'test-server': {
            customUserVars: 'not-an-object',
          },
        },
      });
      const mockMcpManager = {
        getRawConfig: jest.fn().mockReturnValue({
          customUserVars: 'not-an-object',
        }),
      };

      require('~/config').getMCPManager.mockReturnValue(mockMcpManager);

      const response = await request(app).get('/api/mcp/test-server/auth-values');

@@ -1097,98 +1141,6 @@ describe('MCP Routes', () => {
    });
  });

  describe('POST /:serverName/reinitialize - Tool Deletion Coverage', () => {
    it('should handle null cached tools during reinitialize (triggers || {} fallback)', async () => {
      const { loadCustomConfig, getCachedTools } = require('~/server/services/Config');

      const mockUserConnection = {
        fetchTools: jest.fn().mockResolvedValue([{ name: 'new-tool', description: 'A new tool' }]),
      };

      const mockMcpManager = {
        getUserConnection: jest.fn().mockResolvedValue(mockUserConnection),
        disconnectServer: jest.fn(),
        initializeServer: jest.fn(),
        mcpConfigs: {},
      };
      require('~/config').getMCPManager.mockReturnValue(mockMcpManager);

      loadCustomConfig.mockResolvedValue({
        mcpServers: {
          'test-server': { env: { API_KEY: 'test-key' } },
        },
      });

      getCachedTools.mockResolvedValue(null);

      const response = await request(app).post('/api/mcp/test-server/reinitialize').expect(200);

      expect(response.body).toEqual({
        message: "MCP server 'test-server' reinitialized successfully",
        success: true,
        oauthRequired: false,
        oauthUrl: null,
        serverName: 'test-server',
      });
    });

    it('should delete existing cached tools during successful reinitialize', async () => {
      const {
        loadCustomConfig,
        getCachedTools,
        setCachedTools,
      } = require('~/server/services/Config');

      const mockUserConnection = {
        fetchTools: jest.fn().mockResolvedValue([{ name: 'new-tool', description: 'A new tool' }]),
      };

      const mockMcpManager = {
        getUserConnection: jest.fn().mockResolvedValue(mockUserConnection),
        disconnectServer: jest.fn(),
        initializeServer: jest.fn(),
        mcpConfigs: {},
      };
      require('~/config').getMCPManager.mockReturnValue(mockMcpManager);

      loadCustomConfig.mockResolvedValue({
        mcpServers: {
          'test-server': { env: { API_KEY: 'test-key' } },
        },
      });

      const existingTools = {
        'old-tool_mcp_test-server': { type: 'function' },
        'other-tool_mcp_other-server': { type: 'function' },
      };
      getCachedTools.mockResolvedValue(existingTools);

      const response = await request(app).post('/api/mcp/test-server/reinitialize').expect(200);

      expect(response.body).toEqual({
        message: "MCP server 'test-server' reinitialized successfully",
        success: true,
        oauthRequired: false,
        oauthUrl: null,
        serverName: 'test-server',
      });

      expect(setCachedTools).toHaveBeenCalledWith(
        expect.objectContaining({
          'new-tool_mcp_test-server': expect.any(Object),
          'other-tool_mcp_other-server': { type: 'function' },
        }),
        { userId: 'test-user-id' },
      );
      expect(setCachedTools).toHaveBeenCalledWith(
        expect.not.objectContaining({
          'old-tool_mcp_test-server': expect.anything(),
        }),
        { userId: 'test-user-id' },
      );
    });
  });

  describe('GET /:serverName/oauth/callback - Edge Cases', () => {
    it('should handle OAuth callback without toolFlowId (falsy toolFlowId)', async () => {
      const { MCPOAuthHandler } = require('@librechat/api');

@@ -46,7 +46,7 @@ router.use('/tools', tools);

/**
 * Get all agent categories with counts
 * @route GET /agents/marketplace/categories
 * @route GET /agents/categories
 */
router.get('/categories', v1.getAgentCategories);
/**

@@ -1,75 +1,75 @@
const express = require('express');
const { createSetBalanceConfig } = require('@librechat/api');
const {
  refreshController,
  registrationController,
  resetPasswordController,
  resetPasswordRequestController,
  resetPasswordController,
  registrationController,
  graphTokenController,
  refreshController,
} = require('~/server/controllers/AuthController');
const { loginController } = require('~/server/controllers/auth/LoginController');
const { logoutController } = require('~/server/controllers/auth/LogoutController');
const { verify2FAWithTempToken } = require('~/server/controllers/auth/TwoFactorAuthController');
const {
  regenerateBackupCodes,
  disable2FA,
  confirm2FA,
  enable2FA,
  verify2FA,
  disable2FA,
  regenerateBackupCodes,
  confirm2FA,
} = require('~/server/controllers/TwoFactorController');
const {
  checkBan,
  logHeaders,
  loginLimiter,
  requireJwtAuth,
  checkInviteUser,
  registerLimiter,
  requireLdapAuth,
  setBalanceConfig,
  requireLocalAuth,
  resetPasswordLimiter,
  validateRegistration,
  validatePasswordReset,
} = require('~/server/middleware');
const { verify2FAWithTempToken } = require('~/server/controllers/auth/TwoFactorAuthController');
const { logoutController } = require('~/server/controllers/auth/LogoutController');
const { loginController } = require('~/server/controllers/auth/LoginController');
const { getBalanceConfig } = require('~/server/services/Config');
const middleware = require('~/server/middleware');
const { Balance } = require('~/db/models');

const setBalanceConfig = createSetBalanceConfig({
  getBalanceConfig,
  Balance,
});

const router = express.Router();

const ldapAuth = !!process.env.LDAP_URL && !!process.env.LDAP_USER_SEARCH_BASE;
//Local
router.post('/logout', requireJwtAuth, logoutController);
router.post('/logout', middleware.requireJwtAuth, logoutController);
router.post(
  '/login',
  logHeaders,
  loginLimiter,
  checkBan,
  ldapAuth ? requireLdapAuth : requireLocalAuth,
  middleware.logHeaders,
  middleware.loginLimiter,
  middleware.checkBan,
  ldapAuth ? middleware.requireLdapAuth : middleware.requireLocalAuth,
  setBalanceConfig,
  loginController,
);
router.post('/refresh', refreshController);
router.post(
  '/register',
  registerLimiter,
  checkBan,
  checkInviteUser,
  validateRegistration,
  middleware.registerLimiter,
  middleware.checkBan,
  middleware.checkInviteUser,
  middleware.validateRegistration,
  registrationController,
);
router.post(
  '/requestPasswordReset',
  resetPasswordLimiter,
  checkBan,
  validatePasswordReset,
  middleware.resetPasswordLimiter,
  middleware.checkBan,
  middleware.validatePasswordReset,
  resetPasswordRequestController,
);
router.post('/resetPassword', checkBan, validatePasswordReset, resetPasswordController);
router.post(
  '/resetPassword',
  middleware.checkBan,
  middleware.validatePasswordReset,
  resetPasswordController,
);

router.get('/2fa/enable', requireJwtAuth, enable2FA);
router.post('/2fa/verify', requireJwtAuth, verify2FA);
router.post('/2fa/verify-temp', checkBan, verify2FAWithTempToken);
router.post('/2fa/confirm', requireJwtAuth, confirm2FA);
router.post('/2fa/disable', requireJwtAuth, disable2FA);
router.post('/2fa/backup/regenerate', requireJwtAuth, regenerateBackupCodes);
router.get('/2fa/enable', middleware.requireJwtAuth, enable2FA);
router.post('/2fa/verify', middleware.requireJwtAuth, verify2FA);
router.post('/2fa/verify-temp', middleware.checkBan, verify2FAWithTempToken);
router.post('/2fa/confirm', middleware.requireJwtAuth, confirm2FA);
router.post('/2fa/disable', middleware.requireJwtAuth, disable2FA);
router.post('/2fa/backup/regenerate', middleware.requireJwtAuth, regenerateBackupCodes);

router.get('/graph-token', requireJwtAuth, graphTokenController);
router.get('/graph-token', middleware.requireJwtAuth, graphTokenController);

module.exports = router;

@@ -111,17 +111,20 @@ router.get('/', async function (req, res) {
  payload.mcpServers = {};
  const config = await getCustomConfig();
  if (config?.mcpServers != null) {
    const mcpManager = getMCPManager();
    const oauthServers = mcpManager.getOAuthServers();

    for (const serverName in config.mcpServers) {
      const serverConfig = config.mcpServers[serverName];
      payload.mcpServers[serverName] = {
        customUserVars: serverConfig?.customUserVars || {},
        chatMenu: serverConfig?.chatMenu,
        isOAuth: oauthServers.has(serverName),
        startup: serverConfig?.startup,
      };
    try {
      const mcpManager = getMCPManager();
      const oauthServers = mcpManager.getOAuthServers();
      for (const serverName in config.mcpServers) {
        const serverConfig = config.mcpServers[serverName];
        payload.mcpServers[serverName] = {
          startup: serverConfig?.startup,
          chatMenu: serverConfig?.chatMenu,
          isOAuth: oauthServers?.has(serverName),
          customUserVars: serverConfig?.customUserVars || {},
        };
      }
    } catch (err) {
      logger.error('Error loading MCP servers', err);
    }
  }

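With the loop now wrapped in try/catch, a failure to obtain the MCP manager degrades gracefully instead of breaking the whole config payload. For reference, each entry built above ends up shaped like this (values are illustrative placeholders, not real config):

```js
// Illustrative shape of one payload.mcpServers entry; 'example-server'
// and the values are placeholders.
payload.mcpServers['example-server'] = {
  startup: true,
  chatMenu: true,
  isOAuth: false, // oauthServers?.has(serverName)
  customUserVars: {}, // serverConfig?.customUserVars || {}
};
```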
@@ -397,13 +397,15 @@ router.post('/', async (req, res) => {
      logger.error('[/files] Error deleting file:', error);
    }
    res.status(500).json({ message });
  }

  if (cleanup) {
    try {
      await fs.unlink(req.file.path);
    } catch (error) {
      logger.error('[/files] Error deleting file after file processing:', error);
  } finally {
    if (cleanup) {
      try {
        await fs.unlink(req.file.path);
      } catch (error) {
        logger.error('[/files] Error deleting file after file processing:', error);
      }
    } else {
      logger.debug('[/files] File processing completed without cleanup');
    }
  }
});

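Moving the unlink into a `finally` block means the temporary upload is removed on both the success and error paths; previously it only ran after the early-return error branch. The pattern in isolation, with hypothetical names (`processUpload` and `tempPath` are not from this repository):

```js
// Generic form of the fix: cleanup belongs in finally so it runs whether
// processing succeeded or threw.
try {
  await processUpload(req.file);
} catch (error) {
  logger.error('[/files] Error processing file:', error);
  res.status(500).json({ message: 'File processing failed' });
} finally {
  if (cleanup) {
    // Swallow unlink failures; the response has already been decided.
    await fs.unlink(tempPath).catch((err) => logger.error('cleanup failed', err));
  }
}
```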
@@ -1,13 +1,15 @@
const { Router } = require('express');
const { logger } = require('@librechat/data-schemas');
const { MCPOAuthHandler } = require('@librechat/api');
const { CacheKeys, Constants } = require('librechat-data-provider');
const { findToken, updateToken, createToken, deleteTokens } = require('~/models');
const { setCachedTools, getCachedTools, loadCustomConfig } = require('~/server/services/Config');
const { MCPOAuthHandler, getUserMCPAuthMap } = require('@librechat/api');
const { getMCPSetupData, getServerConnectionStatus } = require('~/server/services/MCP');
const { findToken, updateToken, createToken, deleteTokens } = require('~/models');
const { updateMCPUserTools } = require('~/server/services/Config/mcpToolsCache');
const { getUserPluginAuthValue } = require('~/server/services/PluginService');
const { CacheKeys, Constants } = require('librechat-data-provider');
const { getMCPManager, getFlowStateManager } = require('~/config');
const { reinitMCPServer } = require('~/server/services/Tools/mcp');
const { requireJwtAuth } = require('~/server/middleware');
const { findPluginAuthsByKeys } = require('~/models');
const { getLogStores } = require('~/cache');

const router = Router();
@@ -142,33 +144,12 @@ router.get('/:serverName/oauth/callback', async (req, res) => {
          `[MCP OAuth] Successfully reconnected ${serverName} for user ${flowState.userId}`,
        );

        const userTools = (await getCachedTools({ userId: flowState.userId })) || {};

        const mcpDelimiter = Constants.mcp_delimiter;
        for (const key of Object.keys(userTools)) {
          if (key.endsWith(`${mcpDelimiter}${serverName}`)) {
            delete userTools[key];
          }
        }

        const tools = await userConnection.fetchTools();
        for (const tool of tools) {
          const name = `${tool.name}${Constants.mcp_delimiter}${serverName}`;
          userTools[name] = {
            type: 'function',
            ['function']: {
              name,
              description: tool.description,
              parameters: tool.inputSchema,
            },
          };
        }

        await setCachedTools(userTools, { userId: flowState.userId });

        logger.debug(
          `[MCP OAuth] Cached ${tools.length} tools for ${serverName} user ${flowState.userId}`,
        );
        await updateMCPUserTools({
          userId: flowState.userId,
          serverName,
          tools,
        });
      } else {
        logger.debug(`[MCP OAuth] System-level OAuth completed for ${serverName}`);
      }
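The callback no longer rebuilds the per-user tool cache inline; `updateMCPUserTools` from `~/server/services/Config/mcpToolsCache` now owns that logic. A plausible sketch of what it encapsulates, assuming it mirrors the inline code it replaces (the actual implementation is not part of this compare):

```js
// Hypothetical sketch: assumed to match the removed inline logic above.
async function updateMCPUserTools({ userId, serverName, tools }) {
  const userTools = (await getCachedTools({ userId })) || {};
  // Drop stale cache entries belonging to this server...
  for (const key of Object.keys(userTools)) {
    if (key.endsWith(`${Constants.mcp_delimiter}${serverName}`)) {
      delete userTools[key];
    }
  }
  // ...then re-add the freshly fetched tools in function-calling format.
  for (const tool of tools) {
    const name = `${tool.name}${Constants.mcp_delimiter}${serverName}`;
    userTools[name] = {
      type: 'function',
      function: { name, description: tool.description, parameters: tool.inputSchema },
    };
  }
  await setCachedTools(userTools, { userId });
}
```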
@@ -315,133 +296,47 @@ router.post('/:serverName/reinitialize', requireJwtAuth, async (req, res) => {

    logger.info(`[MCP Reinitialize] Reinitializing server: ${serverName}`);

    const printConfig = false;
    const config = await loadCustomConfig(printConfig);
    if (!config || !config.mcpServers || !config.mcpServers[serverName]) {
    const mcpManager = getMCPManager();
    const serverConfig = mcpManager.getRawConfig(serverName);
    if (!serverConfig) {
      return res.status(404).json({
        error: `MCP server '${serverName}' not found in configuration`,
      });
    }

    const flowsCache = getLogStores(CacheKeys.FLOWS);
    const flowManager = getFlowStateManager(flowsCache);
    const mcpManager = getMCPManager();

    await mcpManager.disconnectServer(serverName);
    logger.info(`[MCP Reinitialize] Disconnected existing server: ${serverName}`);

    const serverConfig = config.mcpServers[serverName];
    mcpManager.mcpConfigs[serverName] = serverConfig;
    let customUserVars = {};
    if (serverConfig.customUserVars && typeof serverConfig.customUserVars === 'object') {
      for (const varName of Object.keys(serverConfig.customUserVars)) {
        try {
          const value = await getUserPluginAuthValue(user.id, varName, false);
          customUserVars[varName] = value;
        } catch (err) {
          logger.error(`[MCP Reinitialize] Error fetching ${varName} for user ${user.id}:`, err);
        }
      }
    }

    let userConnection = null;
    let oauthRequired = false;
    let oauthUrl = null;

    try {
      userConnection = await mcpManager.getUserConnection({
        user,
        serverName,
        flowManager,
        customUserVars,
        tokenMethods: {
          findToken,
          updateToken,
          createToken,
          deleteTokens,
        },
        returnOnOAuth: true,
        oauthStart: async (authURL) => {
          logger.info(`[MCP Reinitialize] OAuth URL received: ${authURL}`);
          oauthUrl = authURL;
          oauthRequired = true;
        },
      });

      logger.info(`[MCP Reinitialize] Successfully established connection for ${serverName}`);
    } catch (err) {
      logger.info(`[MCP Reinitialize] getUserConnection threw error: ${err.message}`);
      logger.info(
        `[MCP Reinitialize] OAuth state - oauthRequired: ${oauthRequired}, oauthUrl: ${oauthUrl ? 'present' : 'null'}`,
      );

      const isOAuthError =
        err.message?.includes('OAuth') ||
        err.message?.includes('authentication') ||
        err.message?.includes('401');

      const isOAuthFlowInitiated = err.message === 'OAuth flow initiated - return early';

      if (isOAuthError || oauthRequired || isOAuthFlowInitiated) {
        logger.info(
          `[MCP Reinitialize] OAuth required for ${serverName} (isOAuthError: ${isOAuthError}, oauthRequired: ${oauthRequired}, isOAuthFlowInitiated: ${isOAuthFlowInitiated})`,
        );
        oauthRequired = true;
      } else {
        logger.error(
          `[MCP Reinitialize] Error initializing MCP server ${serverName} for user:`,
          err,
        );
        return res.status(500).json({ error: 'Failed to reinitialize MCP server for user' });
      }
    }

    if (userConnection && !oauthRequired) {
      const userTools = (await getCachedTools({ userId: user.id })) || {};

      const mcpDelimiter = Constants.mcp_delimiter;
      for (const key of Object.keys(userTools)) {
        if (key.endsWith(`${mcpDelimiter}${serverName}`)) {
          delete userTools[key];
        }
      }

      const tools = await userConnection.fetchTools();
      for (const tool of tools) {
        const name = `${tool.name}${Constants.mcp_delimiter}${serverName}`;
        userTools[name] = {
          type: 'function',
          ['function']: {
            name,
            description: tool.description,
            parameters: tool.inputSchema,
          },
        };
      }

      await setCachedTools(userTools, { userId: user.id });
    }

    logger.debug(
      `[MCP Reinitialize] Sending response for ${serverName} - oauthRequired: ${oauthRequired}, oauthUrl: ${oauthUrl ? 'present' : 'null'}`,
    await mcpManager.disconnectUserConnection(user.id, serverName);
    logger.info(
      `[MCP Reinitialize] Disconnected existing user connection for server: ${serverName}`,
    );

    const getResponseMessage = () => {
      if (oauthRequired) {
        return `MCP server '${serverName}' ready for OAuth authentication`;
      }
      if (userConnection) {
        return `MCP server '${serverName}' reinitialized successfully`;
      }
      return `Failed to reinitialize MCP server '${serverName}'`;
    };
    /** @type {Record<string, Record<string, string>> | undefined} */
    let userMCPAuthMap;
    if (serverConfig.customUserVars && typeof serverConfig.customUserVars === 'object') {
      userMCPAuthMap = await getUserMCPAuthMap({
        userId: user.id,
        servers: [serverName],
        findPluginAuthsByKeys,
      });
    }

    const result = await reinitMCPServer({
      req,
      serverName,
      userMCPAuthMap,
    });

    if (!result) {
      return res.status(500).json({ error: 'Failed to reinitialize MCP server for user' });
    }

    const { success, message, oauthRequired, oauthUrl } = result;

    res.json({
      success: (userConnection && !oauthRequired) || (oauthRequired && oauthUrl),
      message: getResponseMessage(),
      success,
      message,
      oauthUrl,
      serverName,
      oauthRequired,
      oauthUrl,
    });
  } catch (error) {
    logger.error('[MCP Reinitialize] Unexpected error', error);
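From a caller's perspective the contract is now uniform: `reinitMCPServer` yields `{ success, message, serverName, oauthRequired, oauthUrl }` (or null on total failure), and the route forwards it as-is. A hedged client-side sketch of consuming it (the fetch wrapper and redirect handling are illustrative, not code from this repository):

```js
// Sketch of a client consuming the reinitialize endpoint.
async function reinitializeServer(serverName) {
  const res = await fetch(`/api/mcp/${serverName}/reinitialize`, { method: 'POST' });
  const body = await res.json();
  if (body.oauthRequired && body.oauthUrl) {
    window.open(body.oauthUrl, '_blank'); // finish OAuth, then retry
  }
  return body;
}
```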
@@ -551,15 +446,14 @@ router.get('/:serverName/auth-values', requireJwtAuth, async (req, res) => {
      return res.status(401).json({ error: 'User not authenticated' });
    }

    const printConfig = false;
    const config = await loadCustomConfig(printConfig);
    if (!config || !config.mcpServers || !config.mcpServers[serverName]) {
    const mcpManager = getMCPManager();
    const serverConfig = mcpManager.getRawConfig(serverName);
    if (!serverConfig) {
      return res.status(404).json({
        error: `MCP server '${serverName}' not found in configuration`,
      });
    }

    const serverConfig = config.mcpServers[serverName];
    const pluginKey = `${Constants.mcp_prefix}${serverName}`;
    const authValueFlags = {};

@@ -13,6 +13,8 @@ const { getRoleByName } = require('~/models/Role');

const router = express.Router();

const memoryPayloadLimit = express.json({ limit: '100kb' });

const checkMemoryRead = generateCheckAccess({
  permissionType: PermissionTypes.MEMORIES,
  permissions: [Permissions.USE, Permissions.READ],
@@ -60,6 +62,7 @@ router.get('/', checkMemoryRead, async (req, res) => {

    const memoryConfig = req.app.locals?.memory;
    const tokenLimit = memoryConfig?.tokenLimit;
    const charLimit = memoryConfig?.charLimit || 10000;

    let usagePercentage = null;
    if (tokenLimit && tokenLimit > 0) {
@@ -70,6 +73,7 @@ router.get('/', checkMemoryRead, async (req, res) => {
      memories: sortedMemories,
      totalTokens,
      tokenLimit: tokenLimit || null,
      charLimit,
      usagePercentage,
    });
  } catch (error) {
@@ -83,7 +87,7 @@ router.get('/', checkMemoryRead, async (req, res) => {
 * Body: { key: string, value: string }
 * Returns 201 and { created: true, memory: <createdDoc> } when successful.
 */
router.post('/', checkMemoryCreate, async (req, res) => {
router.post('/', memoryPayloadLimit, checkMemoryCreate, async (req, res) => {
  const { key, value } = req.body;

  if (typeof key !== 'string' || key.trim() === '') {
@@ -94,13 +98,25 @@ router.post('/', checkMemoryCreate, async (req, res) => {
    return res.status(400).json({ error: 'Value is required and must be a non-empty string.' });
  }

  const memoryConfig = req.app.locals?.memory;
  const charLimit = memoryConfig?.charLimit || 10000;

  if (key.length > 1000) {
    return res.status(400).json({
      error: `Key exceeds maximum length of 1000 characters. Current length: ${key.length} characters.`,
    });
  }

  if (value.length > charLimit) {
    return res.status(400).json({
      error: `Value exceeds maximum length of ${charLimit} characters. Current length: ${value.length} characters.`,
    });
  }

  try {
    const tokenCount = Tokenizer.getTokenCount(value, 'o200k_base');

    const memories = await getAllUserMemories(req.user.id);

    // Check token limit
    const memoryConfig = req.app.locals?.memory;
    const tokenLimit = memoryConfig?.tokenLimit;

    if (tokenLimit) {
@@ -175,7 +191,7 @@ router.patch('/preferences', checkMemoryOptOut, async (req, res) => {
 * Body: { key?: string, value: string }
 * Returns 200 and { updated: true, memory: <updatedDoc> } when successful.
 */
router.patch('/:key', checkMemoryUpdate, async (req, res) => {
router.patch('/:key', memoryPayloadLimit, checkMemoryUpdate, async (req, res) => {
  const { key: urlKey } = req.params;
  const { key: bodyKey, value } = req.body || {};

@@ -183,9 +199,23 @@ router.patch('/:key', checkMemoryUpdate, async (req, res) => {
    return res.status(400).json({ error: 'Value is required and must be a non-empty string.' });
  }

  // Use the key from the body if provided, otherwise use the key from the URL
  const newKey = bodyKey || urlKey;

  const memoryConfig = req.app.locals?.memory;
  const charLimit = memoryConfig?.charLimit || 10000;

  if (newKey.length > 1000) {
    return res.status(400).json({
      error: `Key exceeds maximum length of 1000 characters. Current length: ${newKey.length} characters.`,
    });
  }

  if (value.length > charLimit) {
    return res.status(400).json({
      error: `Value exceeds maximum length of ${charLimit} characters. Current length: ${value.length} characters.`,
    });
  }

  try {
    const tokenCount = Tokenizer.getTokenCount(value, 'o200k_base');

@@ -196,7 +226,6 @@ router.patch('/:key', checkMemoryUpdate, async (req, res) => {
      return res.status(404).json({ error: 'Memory not found.' });
    }

    // If the key is changing, we need to handle it specially
    if (newKey !== urlKey) {
      const keyExists = memories.find((m) => m.key === newKey);
      if (keyExists) {
@@ -219,7 +248,6 @@ router.patch('/:key', checkMemoryUpdate, async (req, res) => {
        return res.status(500).json({ error: 'Failed to delete old memory.' });
      }
    } else {
      // Key is not changing, just update the value
      const result = await setMemory({
        userId: req.user.id,
        key: newKey,

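The new `memoryPayloadLimit` (a 100kb JSON body cap) plus the key/value length checks give three distinct rejection paths before any tokenization happens. A hedged supertest sketch of the value-length check, assuming the memory router is mounted at /api/memories and no custom charLimit is configured (so the 10000 default applies):

```js
// Assumption: router mounted at /api/memories, default charLimit of 10000.
it('rejects a memory value longer than charLimit', async () => {
  const response = await request(app)
    .post('/api/memories')
    .send({ key: 'notes', value: 'x'.repeat(10001) });

  expect(response.status).toBe(400);
  expect(response.body.error).toBe(
    'Value exceeds maximum length of 10000 characters. Current length: 10001 characters.',
  );
});
```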
@@ -2,17 +2,19 @@
const express = require('express');
const passport = require('passport');
const { randomState } = require('openid-client');
const {
  checkBan,
  logHeaders,
  loginLimiter,
  setBalanceConfig,
  checkDomainAllowed,
} = require('~/server/middleware');
const { logger } = require('@librechat/data-schemas');
const { ErrorTypes } = require('librechat-data-provider');
const { isEnabled, createSetBalanceConfig } = require('@librechat/api');
const { checkDomainAllowed, loginLimiter, logHeaders, checkBan } = require('~/server/middleware');
const { syncUserEntraGroupMemberships } = require('~/server/services/PermissionService');
const { setAuthTokens, setOpenIDAuthTokens } = require('~/server/services/AuthService');
const { isEnabled } = require('~/server/utils');
const { logger } = require('~/config');
const { getBalanceConfig } = require('~/server/services/Config');
const { Balance } = require('~/db/models');

const setBalanceConfig = createSetBalanceConfig({
  getBalanceConfig,
  Balance,
});

const router = express.Router();

@@ -48,13 +50,13 @@ const oauthHandler = async (req, res) => {
};

router.get('/error', (req, res) => {
  // A single error message is pushed by passport when authentication fails.
  /** A single error message is pushed by passport when authentication fails. */
  const errorMessage = req.session?.messages?.pop() || 'Unknown error';
  logger.error('Error in OAuth authentication:', {
    message: req.session?.messages?.pop() || 'Unknown error',
    message: errorMessage,
  });

  // Redirect to login page with auth_failed parameter to prevent infinite redirect loops
  res.redirect(`${domains.client}/login?redirect=false`);
  res.redirect(`${domains.client}/login?redirect=false&error=${ErrorTypes.AUTH_FAILED}`);
});

/**

@@ -1,6 +1,13 @@
const express = require('express');
const { logger } = require('@librechat/data-schemas');
const { generateCheckAccess } = require('@librechat/api');
const {
  generateCheckAccess,
  markPublicPromptGroups,
  buildPromptGroupFilter,
  formatPromptGroupsResponse,
  createEmptyPromptGroupsResponse,
  filterAccessibleIdsBySharedLogic,
} = require('@librechat/api');
const {
  Permissions,
  SystemRoles,
@@ -11,12 +18,11 @@ const {
  PermissionTypes,
} = require('librechat-data-provider');
const {
  getListPromptGroupsByAccess,
  makePromptProduction,
  getAllPromptGroups,
  updatePromptGroup,
  deletePromptGroup,
  createPromptGroup,
  getPromptGroups,
  getPromptGroup,
  deletePrompt,
  getPrompts,
@@ -95,23 +101,48 @@ router.get(
router.get('/all', async (req, res) => {
  try {
    const userId = req.user.id;
    const { name, category, ...otherFilters } = req.query;
    const { filter, searchShared, searchSharedOnly } = buildPromptGroupFilter({
      name,
      category,
      ...otherFilters,
    });

    // Get promptGroup IDs the user has VIEW access to via ACL
    const accessibleIds = await findAccessibleResources({
    let accessibleIds = await findAccessibleResources({
      userId,
      role: req.user.role,
      resourceType: ResourceType.PROMPTGROUP,
      requiredPermissions: PermissionBits.VIEW,
    });

    const groups = await getAllPromptGroups(req, {});
    const publiclyAccessibleIds = await findPubliclyAccessibleResources({
      resourceType: ResourceType.PROMPTGROUP,
      requiredPermissions: PermissionBits.VIEW,
    });

    // Filter the results to only include accessible groups
    const accessibleGroups = groups.filter((group) =>
      accessibleIds.some((id) => id.toString() === group._id.toString()),
    );
    const filteredAccessibleIds = await filterAccessibleIdsBySharedLogic({
      accessibleIds,
      searchShared,
      searchSharedOnly,
      publicPromptGroupIds: publiclyAccessibleIds,
    });

    res.status(200).send(accessibleGroups);
    const result = await getListPromptGroupsByAccess({
      accessibleIds: filteredAccessibleIds,
      otherParams: filter,
    });

    if (!result) {
      return res.status(200).send([]);
    }

    const { data: promptGroups = [] } = result;
    if (!promptGroups.length) {
      return res.status(200).send([]);
    }

    const groupsWithPublicFlag = markPublicPromptGroups(promptGroups, publiclyAccessibleIds);
    res.status(200).send(groupsWithPublicFlag);
  } catch (error) {
    logger.error(error);
    res.status(500).send({ error: 'Error getting prompt groups' });
@@ -125,40 +156,66 @@ router.get('/all', async (req, res) => {
router.get('/groups', async (req, res) => {
  try {
    const userId = req.user.id;
    const filter = { ...req.query };
    delete filter.author; // Remove author filter as we'll use ACL
    const { pageSize, pageNumber, limit, cursor, name, category, ...otherFilters } = req.query;

    // Get promptGroup IDs the user has VIEW access to via ACL
    const accessibleIds = await findAccessibleResources({
    const { filter, searchShared, searchSharedOnly } = buildPromptGroupFilter({
      name,
      category,
      ...otherFilters,
    });

    let actualLimit = limit;
    let actualCursor = cursor;

    if (pageSize && !limit) {
      actualLimit = parseInt(pageSize, 10);
    }

    let accessibleIds = await findAccessibleResources({
      userId,
      role: req.user.role,
      resourceType: ResourceType.PROMPTGROUP,
      requiredPermissions: PermissionBits.VIEW,
    });

    // Get publicly accessible promptGroups
    const publiclyAccessibleIds = await findPubliclyAccessibleResources({
      resourceType: ResourceType.PROMPTGROUP,
      requiredPermissions: PermissionBits.VIEW,
    });

    const groups = await getPromptGroups(req, filter);
    const filteredAccessibleIds = await filterAccessibleIdsBySharedLogic({
      accessibleIds,
      searchShared,
      searchSharedOnly,
      publicPromptGroupIds: publiclyAccessibleIds,
    });

    if (groups.promptGroups && groups.promptGroups.length > 0) {
      groups.promptGroups = groups.promptGroups.filter((group) =>
        accessibleIds.some((id) => id.toString() === group._id.toString()),
      );
    const result = await getListPromptGroupsByAccess({
      accessibleIds: filteredAccessibleIds,
      otherParams: filter,
      limit: actualLimit,
      after: actualCursor,
    });

      // Mark public groups
      groups.promptGroups = groups.promptGroups.map((group) => {
        if (publiclyAccessibleIds.some((id) => id.equals(group._id))) {
          group.isPublic = true;
        }
        return group;
      });
    if (!result) {
      const emptyResponse = createEmptyPromptGroupsResponse({ pageNumber, pageSize, actualLimit });
      return res.status(200).send(emptyResponse);
    }

    res.status(200).send(groups);
    const { data: promptGroups = [], has_more = false, after = null } = result;

    const groupsWithPublicFlag = markPublicPromptGroups(promptGroups, publiclyAccessibleIds);

    const response = formatPromptGroupsResponse({
      promptGroups: groupsWithPublicFlag,
      pageNumber,
      pageSize,
      actualLimit,
      hasMore: has_more,
      after,
    });

    res.status(200).send(response);
  } catch (error) {
    logger.error(error);
    res.status(500).send({ error: 'Error getting prompt groups' });
@@ -188,7 +245,6 @@ const createNewPromptGroup = async (req, res) => {

  const result = await createPromptGroup(saveData);

  // Grant owner permissions to the creator on the new promptGroup
  if (result.prompt && result.prompt._id && result.prompt.groupId) {
    try {
      await grantPermission({

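`markPublicPromptGroups` replaces the inline mapping that previously set `isPublic`; a plausible equivalent, assuming it matches the removed code (its real implementation lives in `@librechat/api` and is not shown in this compare):

```js
// Hypothetical sketch mirroring the removed inline logic.
function markPublicPromptGroups(promptGroups, publicIds) {
  return promptGroups.map((group) => {
    if (publicIds.some((id) => id.equals(group._id))) {
      return { ...group, isPublic: true };
    }
    return group;
  });
}
```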
@@ -1,11 +1,13 @@
|
||||
const express = require('express');
|
||||
const {
|
||||
SystemRoles,
|
||||
roleDefaults,
|
||||
PermissionTypes,
|
||||
agentPermissionsSchema,
|
||||
promptPermissionsSchema,
|
||||
memoryPermissionsSchema,
|
||||
agentPermissionsSchema,
|
||||
PermissionTypes,
|
||||
roleDefaults,
|
||||
SystemRoles,
|
||||
marketplacePermissionsSchema,
|
||||
peoplePickerPermissionsSchema,
|
||||
} = require('librechat-data-provider');
|
||||
const { checkAdmin, requireJwtAuth } = require('~/server/middleware');
|
||||
const { updateRoleByName, getRoleByName } = require('~/models/Role');
|
||||
@@ -13,6 +15,81 @@ const { updateRoleByName, getRoleByName } = require('~/models/Role');
|
||||
const router = express.Router();
|
||||
router.use(requireJwtAuth);
|
||||
|
||||
/**
|
||||
* Permission configuration mapping
|
||||
* Maps route paths to their corresponding schemas and permission types
|
||||
*/
|
||||
const permissionConfigs = {
|
||||
prompts: {
|
||||
schema: promptPermissionsSchema,
|
||||
permissionType: PermissionTypes.PROMPTS,
|
||||
errorMessage: 'Invalid prompt permissions.',
|
||||
},
|
||||
agents: {
|
||||
schema: agentPermissionsSchema,
|
||||
permissionType: PermissionTypes.AGENTS,
|
||||
errorMessage: 'Invalid agent permissions.',
|
||||
},
|
||||
memories: {
|
||||
schema: memoryPermissionsSchema,
|
||||
permissionType: PermissionTypes.MEMORIES,
|
||||
errorMessage: 'Invalid memory permissions.',
|
||||
},
|
||||
'people-picker': {
|
||||
schema: peoplePickerPermissionsSchema,
|
||||
permissionType: PermissionTypes.PEOPLE_PICKER,
|
||||
errorMessage: 'Invalid people picker permissions.',
|
||||
},
|
||||
marketplace: {
|
||||
schema: marketplacePermissionsSchema,
|
||||
permissionType: PermissionTypes.MARKETPLACE,
|
||||
errorMessage: 'Invalid marketplace permissions.',
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
* Generic handler for updating permissions
|
||||
* @param {string} permissionKey - The key from permissionConfigs
|
||||
* @returns {Function} Express route handler
|
||||
*/
|
||||
const createPermissionUpdateHandler = (permissionKey) => {
|
||||
const config = permissionConfigs[permissionKey];
|
||||
|
||||
return async (req, res) => {
|
||||
const { roleName: _r } = req.params;
|
||||
// TODO: TEMP, use a better parsing for roleName
|
||||
const roleName = _r.toUpperCase();
|
||||
const updates = req.body;
|
||||
|
||||
try {
|
||||
const parsedUpdates = config.schema.partial().parse(updates);
|
||||
|
||||
const role = await getRoleByName(roleName);
|
||||
if (!role) {
|
||||
return res.status(404).send({ message: 'Role not found' });
|
||||
}
|
||||
|
||||
const currentPermissions =
|
||||
        role.permissions?.[config.permissionType] || role[config.permissionType] || {};

      const mergedUpdates = {
        permissions: {
          ...role.permissions,
          [config.permissionType]: {
            ...currentPermissions,
            ...parsedUpdates,
          },
        },
      };

      const updatedRole = await updateRoleByName(roleName, mergedUpdates);
      res.status(200).send(updatedRole);
    } catch (error) {
      return res.status(400).send({ message: config.errorMessage, error: error.errors });
    }
  };
};

/**
 * GET /api/roles/:roleName
 * Get a specific role by name
@@ -45,117 +122,30 @@ router.get('/:roleName', async (req, res) => {
 * PUT /api/roles/:roleName/prompts
 * Update prompt permissions for a specific role
 */
router.put('/:roleName/prompts', checkAdmin, async (req, res) => {
  const { roleName: _r } = req.params;
  // TODO: TEMP, use a better parsing for roleName
  const roleName = _r.toUpperCase();
  /** @type {TRole['permissions']['PROMPTS']} */
  const updates = req.body;

  try {
    const parsedUpdates = promptPermissionsSchema.partial().parse(updates);

    const role = await getRoleByName(roleName);
    if (!role) {
      return res.status(404).send({ message: 'Role not found' });
    }

    const currentPermissions =
      role.permissions?.[PermissionTypes.PROMPTS] || role[PermissionTypes.PROMPTS] || {};

    const mergedUpdates = {
      permissions: {
        ...role.permissions,
        [PermissionTypes.PROMPTS]: {
          ...currentPermissions,
          ...parsedUpdates,
        },
      },
    };

    const updatedRole = await updateRoleByName(roleName, mergedUpdates);
    res.status(200).send(updatedRole);
  } catch (error) {
    return res.status(400).send({ message: 'Invalid prompt permissions.', error: error.errors });
  }
});
router.put('/:roleName/prompts', checkAdmin, createPermissionUpdateHandler('prompts'));

/**
 * PUT /api/roles/:roleName/agents
 * Update agent permissions for a specific role
 */
router.put('/:roleName/agents', checkAdmin, async (req, res) => {
  const { roleName: _r } = req.params;
  // TODO: TEMP, use a better parsing for roleName
  const roleName = _r.toUpperCase();
  /** @type {TRole['permissions']['AGENTS']} */
  const updates = req.body;

  try {
    const parsedUpdates = agentPermissionsSchema.partial().parse(updates);

    const role = await getRoleByName(roleName);
    if (!role) {
      return res.status(404).send({ message: 'Role not found' });
    }

    const currentPermissions =
      role.permissions?.[PermissionTypes.AGENTS] || role[PermissionTypes.AGENTS] || {};

    const mergedUpdates = {
      permissions: {
        ...role.permissions,
        [PermissionTypes.AGENTS]: {
          ...currentPermissions,
          ...parsedUpdates,
        },
      },
    };

    const updatedRole = await updateRoleByName(roleName, mergedUpdates);
    res.status(200).send(updatedRole);
  } catch (error) {
    return res.status(400).send({ message: 'Invalid agent permissions.', error: error.errors });
  }
});
router.put('/:roleName/agents', checkAdmin, createPermissionUpdateHandler('agents'));

/**
 * PUT /api/roles/:roleName/memories
 * Update memory permissions for a specific role
 */
router.put('/:roleName/memories', checkAdmin, async (req, res) => {
  const { roleName: _r } = req.params;
  // TODO: TEMP, use a better parsing for roleName
  const roleName = _r.toUpperCase();
  /** @type {TRole['permissions']['MEMORIES']} */
  const updates = req.body;
router.put('/:roleName/memories', checkAdmin, createPermissionUpdateHandler('memories'));

  try {
    const parsedUpdates = memoryPermissionsSchema.partial().parse(updates);
/**
 * PUT /api/roles/:roleName/people-picker
 * Update people picker permissions for a specific role
 */
router.put('/:roleName/people-picker', checkAdmin, createPermissionUpdateHandler('people-picker'));

    const role = await getRoleByName(roleName);
    if (!role) {
      return res.status(404).send({ message: 'Role not found' });
    }

    const currentPermissions =
      role.permissions?.[PermissionTypes.MEMORIES] || role[PermissionTypes.MEMORIES] || {};

    const mergedUpdates = {
      permissions: {
        ...role.permissions,
        [PermissionTypes.MEMORIES]: {
          ...currentPermissions,
          ...parsedUpdates,
        },
      },
    };

    const updatedRole = await updateRoleByName(roleName, mergedUpdates);
    res.status(200).send(updatedRole);
  } catch (error) {
    return res.status(400).send({ message: 'Invalid memory permissions.', error: error.errors });
  }
});
/**
 * PUT /api/roles/:roleName/marketplace
 * Update marketplace permissions for a specific role
 */
router.put('/:roleName/marketplace', checkAdmin, createPermissionUpdateHandler('marketplace'));

module.exports = router;

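The consolidated routes above all delegate to createPermissionUpdateHandler; a minimal client-side sketch of exercising one of them follows. The role name and the permission flags are assumptions for illustration, not values from this diff:

// Hedged sketch; 'editor', USE, and CREATE are hypothetical:
await fetch('/api/roles/editor/agents', {
  method: 'PUT',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ USE: true, CREATE: false }),
});
// The handler uppercases ':roleName', validates the body against the
// permission schema, and deep-merges the parsed updates into role.permissions.
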
@@ -5,7 +5,7 @@ jest.mock('~/models', () => ({
}));
jest.mock('~/models/Role', () => ({
  updateAccessPermissions: jest.fn(),
  getRoleByName: jest.fn(),
  getRoleByName: jest.fn().mockResolvedValue(null),
  updateRoleByName: jest.fn(),
}));

@@ -89,4 +89,76 @@ describe('AppService interface configuration', () => {
    expect(app.locals.interfaceConfig.bookmarks).toBe(false);
    expect(loadDefaultInterface).toHaveBeenCalled();
  });

  it('should correctly configure peoplePicker permissions including roles', async () => {
    mockLoadCustomConfig.mockResolvedValue({
      interface: {
        peoplePicker: {
          users: true,
          groups: true,
          roles: true,
        },
      },
    });
    loadDefaultInterface.mockResolvedValue({
      peoplePicker: {
        users: true,
        groups: true,
        roles: true,
      },
    });

    await AppService(app);

    expect(app.locals.interfaceConfig.peoplePicker).toBeDefined();
    expect(app.locals.interfaceConfig.peoplePicker).toMatchObject({
      users: true,
      groups: true,
      roles: true,
    });
    expect(loadDefaultInterface).toHaveBeenCalled();
  });

  it('should handle mixed peoplePicker permissions', async () => {
    mockLoadCustomConfig.mockResolvedValue({
      interface: {
        peoplePicker: {
          users: true,
          groups: false,
          roles: true,
        },
      },
    });
    loadDefaultInterface.mockResolvedValue({
      peoplePicker: {
        users: true,
        groups: false,
        roles: true,
      },
    });

    await AppService(app);

    expect(app.locals.interfaceConfig.peoplePicker.users).toBe(true);
    expect(app.locals.interfaceConfig.peoplePicker.groups).toBe(false);
    expect(app.locals.interfaceConfig.peoplePicker.roles).toBe(true);
  });

  it('should set default peoplePicker permissions when not provided', async () => {
    mockLoadCustomConfig.mockResolvedValue({});
    loadDefaultInterface.mockResolvedValue({
      peoplePicker: {
        users: true,
        groups: true,
        roles: true,
      },
    });

    await AppService(app);

    expect(app.locals.interfaceConfig.peoplePicker).toBeDefined();
    expect(app.locals.interfaceConfig.peoplePicker.users).toBe(true);
    expect(app.locals.interfaceConfig.peoplePicker.groups).toBe(true);
    expect(app.locals.interfaceConfig.peoplePicker.roles).toBe(true);
  });
});

@@ -1,22 +1,26 @@
const { agentsConfigSetup, loadWebSearchConfig } = require('@librechat/api');
const {
  isEnabled,
  loadMemoryConfig,
  agentsConfigSetup,
  loadWebSearchConfig,
} = require('@librechat/api');
const {
  FileSources,
  loadOCRConfig,
  EModelEndpoint,
  loadMemoryConfig,
  getConfigDefaults,
} = require('librechat-data-provider');
const {
  checkWebSearchConfig,
  checkAzureVariables,
  checkVariables,
  checkHealth,
  checkConfig,
  checkVariables,
  checkAzureVariables,
  checkWebSearchConfig,
} = require('./start/checks');
const { ensureDefaultCategories, seedDefaultRoles, initializeRoles } = require('~/models');
const { azureAssistantsDefaults, assistantsConfigSetup } = require('./start/assistants');
const { initializeAzureBlobService } = require('./Files/Azure/initialize');
const { initializeFirebase } = require('./Files/Firebase/initialize');
const { seedDefaultRoles, initializeRoles, ensureDefaultCategories } = require('~/models');
const loadCustomConfig = require('./Config/loadCustomConfig');
const handleRateLimits = require('./Config/handleRateLimits');
const { loadDefaultInterface } = require('./start/interface');
@@ -25,7 +29,6 @@ const { azureConfigSetup } = require('./start/azureOpenAI');
const { processModelSpecs } = require('./start/modelSpecs');
const { initializeS3 } = require('./Files/S3/initialize');
const { loadAndFormatTools } = require('./ToolService');
const { isEnabled } = require('~/server/utils');
const { setCachedTools } = require('./Config');
const paths = require('~/config/paths');

@@ -33,6 +33,7 @@ jest.mock('~/models', () => ({
}));
jest.mock('~/models/Role', () => ({
  updateAccessPermissions: jest.fn(),
  getRoleByName: jest.fn().mockResolvedValue(null),
}));
jest.mock('./Config', () => ({
  setCachedTools: jest.fn(),
@@ -969,4 +970,29 @@ describe('AppService updating app.locals and issuing warnings', () => {
    expect(app.locals.ocr.strategy).toEqual('mistral_ocr');
    expect(app.locals.ocr.mistralModel).toEqual('mistral-medium');
  });

  it('should correctly configure peoplePicker permissions when specified', async () => {
    const mockConfig = {
      interface: {
        peoplePicker: {
          users: true,
          groups: true,
          roles: true,
        },
      },
    };

    require('./Config/loadCustomConfig').mockImplementationOnce(() => Promise.resolve(mockConfig));

    const app = { locals: {} };
    await AppService(app);

    // Check that interface config includes the permissions
    expect(app.locals.interfaceConfig.peoplePicker).toBeDefined();
    expect(app.locals.interfaceConfig.peoplePicker).toMatchObject({
      users: true,
      groups: true,
      roles: true,
    });
  });
});

@@ -60,7 +60,14 @@ const replaceArtifactContent = (originalText, artifact, original, updated) => {

  // Find boundaries between ARTIFACT_START and ARTIFACT_END
  const contentStart = artifactContent.indexOf('\n', artifactContent.indexOf(ARTIFACT_START)) + 1;
  const contentEnd = artifactContent.lastIndexOf(ARTIFACT_END);
  let contentEnd = artifactContent.lastIndexOf(ARTIFACT_END);

  // Special case: if contentEnd is 0, it means the only ::: found is at the start of :::artifact
  // This indicates an incomplete artifact (no closing :::)
  // We need to check that it's exactly at position 0 (the beginning of artifactContent)
  if (contentEnd === 0 && artifactContent.indexOf(ARTIFACT_START) === 0) {
    contentEnd = artifactContent.length;
  }

  if (contentStart === -1 || contentEnd === -1) {
    return null;
@@ -72,12 +79,20 @@ const replaceArtifactContent = (originalText, artifact, original, updated) => {

  // Determine where to look for the original content
  let searchStart, searchEnd;
  if (codeBlockStart !== -1 && codeBlockEnd !== -1) {
    // If code blocks exist, search between them
  if (codeBlockStart !== -1) {
    // Code block starts
    searchStart = codeBlockStart + 4; // after ```\n
    searchEnd = codeBlockEnd;

    if (codeBlockEnd !== -1 && codeBlockEnd > codeBlockStart) {
      // Code block has proper ending
      searchEnd = codeBlockEnd;
    } else {
      // No closing backticks found or they're before the opening (shouldn't happen)
      // This might be an incomplete artifact - search to contentEnd
      searchEnd = contentEnd;
    }
  } else {
    // Otherwise search in the whole artifact content
    // No code blocks at all
    searchStart = contentStart;
    searchEnd = contentEnd;
  }

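A small illustration of the contentEnd === 0 special case handled above; the marker strings are assumed to match the artifact format exercised in the tests below:

const ARTIFACT_START = ':::artifact';
const ARTIFACT_END = ':::';
const incomplete = ':::artifact{id="x"}\n```\ncode';
// The only ':::' is the opening marker itself, so lastIndexOf returns 0
// and the branch above extends contentEnd to the full string length:
console.log(incomplete.lastIndexOf(ARTIFACT_END)); // 0
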
@@ -89,9 +89,9 @@ describe('replaceArtifactContent', () => {
  };

  test('should replace content within artifact boundaries', () => {
    const original = 'console.log(\'hello\')';
    const original = "console.log('hello')";
    const artifact = createTestArtifact(original);
    const updated = 'console.log(\'updated\')';
    const updated = "console.log('updated')";

    const result = replaceArtifactContent(artifact.text, artifact, original, updated);
    expect(result).toContain(updated);
@@ -317,4 +317,182 @@ console.log(greeting);`;
    expect(result).not.toContain('\n\n```');
    expect(result).not.toContain('```\n\n');
  });

  describe('incomplete artifacts', () => {
    test('should handle incomplete artifacts (missing closing ::: and ```)', () => {
      const original = `<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>Pomodoro</title>
<meta name="description" content="A single-file Pomodoro timer with logs, charts, sounds, and dark mode." />
<style>
:root{`;

      const prefix = `Awesome idea! I'll deliver a complete single-file HTML app called "Pomodoro" with:
- Custom session/break durations

You can save this as pomodoro.html and open it directly in your browser.

`;

      // This simulates the real incomplete artifact case - no closing ``` or :::
      const incompleteArtifact = `${ARTIFACT_START}{identifier="pomodoro-single-file-app" type="text/html" title="Pomodoro — Single File App"}
\`\`\`
${original}`;

      const fullText = prefix + incompleteArtifact;
      const message = { text: fullText };
      const artifacts = findAllArtifacts(message);

      expect(artifacts).toHaveLength(1);
      expect(artifacts[0].end).toBe(fullText.length);

      const updated = original.replace('Pomodoro</title>', 'Pomodoro</title>UPDATED');
      const result = replaceArtifactContent(fullText, artifacts[0], original, updated);

      expect(result).not.toBeNull();
      expect(result).toContain('UPDATED');
      expect(result).toContain(prefix);
      // Should not have added closing markers
      expect(result).not.toMatch(/:::\s*$/);
    });

    test('should handle incomplete artifacts with only opening code block', () => {
      const original = 'function hello() { console.log("world"); }';
      const incompleteArtifact = `${ARTIFACT_START}{id="test"}\n\`\`\`\n${original}`;

      const message = { text: incompleteArtifact };
      const artifacts = findAllArtifacts(message);

      expect(artifacts).toHaveLength(1);

      const updated = 'function hello() { console.log("UPDATED"); }';
      const result = replaceArtifactContent(incompleteArtifact, artifacts[0], original, updated);

      expect(result).not.toBeNull();
      expect(result).toContain('UPDATED');
    });

    test('should handle incomplete artifacts without code blocks', () => {
      const original = 'Some plain text content';
      const incompleteArtifact = `${ARTIFACT_START}{id="test"}\n${original}`;

      const message = { text: incompleteArtifact };
      const artifacts = findAllArtifacts(message);

      expect(artifacts).toHaveLength(1);

      const updated = 'Some UPDATED text content';
      const result = replaceArtifactContent(incompleteArtifact, artifacts[0], original, updated);

      expect(result).not.toBeNull();
      expect(result).toContain('UPDATED');
    });
  });

  describe('regression tests for edge cases', () => {
    test('should still handle complete artifacts correctly', () => {
      // Ensure we didn't break normal artifact handling
      const original = 'console.log("test");';
      const artifact = createArtifactText({ content: original });

      const message = { text: artifact };
      const artifacts = findAllArtifacts(message);

      expect(artifacts).toHaveLength(1);

      const updated = 'console.log("updated");';
      const result = replaceArtifactContent(artifact, artifacts[0], original, updated);

      expect(result).not.toBeNull();
      expect(result).toContain(updated);
      expect(result).toContain(ARTIFACT_END);
      expect(result).toMatch(/```\nconsole\.log\("updated"\);\n```/);
    });

    test('should handle multiple complete artifacts', () => {
      // Ensure multiple artifacts still work
      const content1 = 'First artifact';
      const content2 = 'Second artifact';
      const text = `${createArtifactText({ content: content1 })}\n\n${createArtifactText({ content: content2 })}`;

      const message = { text };
      const artifacts = findAllArtifacts(message);

      expect(artifacts).toHaveLength(2);

      // Update first artifact
      const result1 = replaceArtifactContent(text, artifacts[0], content1, 'First UPDATED');
      expect(result1).not.toBeNull();
      expect(result1).toContain('First UPDATED');
      expect(result1).toContain(content2);

      // Update second artifact
      const result2 = replaceArtifactContent(text, artifacts[1], content2, 'Second UPDATED');
      expect(result2).not.toBeNull();
      expect(result2).toContain(content1);
      expect(result2).toContain('Second UPDATED');
    });

    test('should not mistake ::: at position 0 for artifact end in complete artifacts', () => {
      // This tests the specific fix - ensuring contentEnd=0 doesn't break complete artifacts
      const original = 'test content';
      // Create an artifact that will have ::: at position 0 when substring'd
      const artifact = `${ARTIFACT_START}\n\`\`\`\n${original}\n\`\`\`\n${ARTIFACT_END}`;

      const message = { text: artifact };
      const artifacts = findAllArtifacts(message);

      expect(artifacts).toHaveLength(1);

      const updated = 'updated content';
      const result = replaceArtifactContent(artifact, artifacts[0], original, updated);

      expect(result).not.toBeNull();
      expect(result).toContain(updated);
      expect(result).toContain(ARTIFACT_END);
    });

    test('should handle empty artifacts', () => {
      // Edge case: empty artifact
      const artifact = `${ARTIFACT_START}\n${ARTIFACT_END}`;

      const message = { text: artifact };
      const artifacts = findAllArtifacts(message);

      expect(artifacts).toHaveLength(1);

      // Trying to replace non-existent content should return null
      const result = replaceArtifactContent(artifact, artifacts[0], 'something', 'updated');
      expect(result).toBeNull();
    });

    test('should preserve whitespace and formatting in complete artifacts', () => {
      const original = `  function test() {
    return {
      value: 42
    };
  }`;
      const artifact = createArtifactText({ content: original });

      const message = { text: artifact };
      const artifacts = findAllArtifacts(message);

      const updated = `  function test() {
    return {
      value: 100
    };
  }`;
      const result = replaceArtifactContent(artifact, artifacts[0], original, updated);

      expect(result).not.toBeNull();
      expect(result).toContain('value: 100');
      // Should preserve exact formatting
      expect(result).toMatch(
        /```\n {2}function test\(\) \{\n {4}return \{\n {6}value: 100\n {4}\};\n {2}\}\n```/,
      );
    });
  });
});

@@ -26,7 +26,7 @@ const ToolCacheKeys = {
 * @param {string[]} [options.roleIds] - Role IDs for role-based tools
 * @param {string[]} [options.groupIds] - Group IDs for group-based tools
 * @param {boolean} [options.includeGlobal=true] - Whether to include global tools
 * @returns {Promise<Object|null>} The available tools object or null if not cached
 * @returns {Promise<LCAvailableTools|null>} The available tools object or null if not cached
 */
async function getCachedTools(options = {}) {
  const cache = getLogStores(CacheKeys.CONFIG_STORE);
@@ -41,13 +41,13 @@ async function getCachedTools(options = {}) {
  // Future implementation will merge tools from multiple sources
  // based on user permissions, roles, and groups
  if (userId) {
    // Check if we have pre-computed effective tools for this user
    /** @type {LCAvailableTools | null} Check if we have pre-computed effective tools for this user */
    const effectiveTools = await cache.get(ToolCacheKeys.EFFECTIVE(userId));
    if (effectiveTools) {
      return effectiveTools;
    }

    // Otherwise, compute from individual sources
    /** @type {LCAvailableTools | null} Otherwise, compute from individual sources */
    const toolSources = [];

    if (includeGlobal) {

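A hedged sketch of the per-user lookup path described by the updated JSDoc; the user ID is a placeholder:

const tools = await getCachedTools({ userId: 'user-123', includeGlobal: true });
if (tools != null) {
  // `tools` is an LCAvailableTools map keyed by tool name; a pre-computed
  // effective set is returned directly when present in the cache.
}
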
@@ -1,5 +1,4 @@
const { logger } = require('@librechat/data-schemas');
const { isEnabled, getUserMCPAuthMap } = require('@librechat/api');
const { isEnabled } = require('@librechat/api');
const { CacheKeys, EModelEndpoint } = require('librechat-data-provider');
const { normalizeEndpointName } = require('~/server/utils');
const loadCustomConfig = require('./loadCustomConfig');
@@ -53,31 +52,6 @@ const getCustomEndpointConfig = async (endpoint) => {
  );
};

/**
 * @param {Object} params
 * @param {string} params.userId
 * @param {GenericTool[]} [params.tools]
 * @param {import('@librechat/data-schemas').PluginAuthMethods['findPluginAuthsByKeys']} params.findPluginAuthsByKeys
 * @returns {Promise<Record<string, Record<string, string>> | undefined>}
 */
async function getMCPAuthMap({ userId, tools, findPluginAuthsByKeys }) {
  try {
    if (!tools || tools.length === 0) {
      return;
    }
    return await getUserMCPAuthMap({
      tools,
      userId,
      findPluginAuthsByKeys,
    });
  } catch (err) {
    logger.error(
      `[api/server/controllers/agents/client.js #chatCompletion] Error getting custom user vars for agent`,
      err,
    );
  }
}

/**
 * @returns {Promise<boolean>}
 */
@@ -88,7 +62,6 @@ async function hasCustomUserVars() {
}

module.exports = {
  getMCPAuthMap,
  getCustomConfig,
  getBalanceConfig,
  hasCustomUserVars,

@@ -1,6 +1,7 @@
const { config } = require('./EndpointService');
const getCachedTools = require('./getCachedTools');
const getCustomConfig = require('./getCustomConfig');
const mcpToolsCache = require('./mcpToolsCache');
const loadCustomConfig = require('./loadCustomConfig');
const loadConfigModels = require('./loadConfigModels');
const loadDefaultModels = require('./loadDefaultModels');
@@ -17,5 +18,6 @@ module.exports = {
  loadAsyncEndpoints,
  ...getCachedTools,
  ...getCustomConfig,
  ...mcpToolsCache,
  ...getEndpointsConfig,
};

@@ -76,10 +76,11 @@ async function loadConfigModels(req) {
    fetchPromisesMap[uniqueKey] =
      fetchPromisesMap[uniqueKey] ||
      fetchModels({
        user: req.user.id,
        baseURL: BASE_URL,
        apiKey: API_KEY,
        name,
        apiKey: API_KEY,
        baseURL: BASE_URL,
        user: req.user.id,
        direct: endpoint.directEndpoint,
        userIdQuery: models.userIdQuery,
      });
    uniqueKeyToEndpointsMap[uniqueKey] = uniqueKeyToEndpointsMap[uniqueKey] || [];

@@ -1,12 +1,11 @@
const { logger } = require('@librechat/data-schemas');
const { EModelEndpoint } = require('librechat-data-provider');
const { useAzurePlugins } = require('~/server/services/Config/EndpointService').config;
const {
  getAnthropicModels,
  getBedrockModels,
  getOpenAIModels,
  getGoogleModels,
  getBedrockModels,
  getAnthropicModels,
} = require('~/server/services/ModelService');
const { logger } = require('~/config');

/**
 * Loads the default models for the application.
@@ -16,58 +15,42 @@ const { logger } = require('~/config');
 */
async function loadDefaultModels(req) {
  try {
    const [
      openAI,
      anthropic,
      azureOpenAI,
      gptPlugins,
      assistants,
      azureAssistants,
      google,
      bedrock,
    ] = await Promise.all([
      getOpenAIModels({ user: req.user.id }).catch((error) => {
        logger.error('Error fetching OpenAI models:', error);
        return [];
      }),
      getAnthropicModels({ user: req.user.id }).catch((error) => {
        logger.error('Error fetching Anthropic models:', error);
        return [];
      }),
      getOpenAIModels({ user: req.user.id, azure: true }).catch((error) => {
        logger.error('Error fetching Azure OpenAI models:', error);
        return [];
      }),
      getOpenAIModels({ user: req.user.id, azure: useAzurePlugins, plugins: true }).catch(
        (error) => {
          logger.error('Error fetching Plugin models:', error);
    const [openAI, anthropic, azureOpenAI, assistants, azureAssistants, google, bedrock] =
      await Promise.all([
        getOpenAIModels({ user: req.user.id }).catch((error) => {
          logger.error('Error fetching OpenAI models:', error);
          return [];
        },
      ),
      getOpenAIModels({ assistants: true }).catch((error) => {
        logger.error('Error fetching OpenAI Assistants API models:', error);
        return [];
      }),
      getOpenAIModels({ azureAssistants: true }).catch((error) => {
        logger.error('Error fetching Azure OpenAI Assistants API models:', error);
        return [];
      }),
      Promise.resolve(getGoogleModels()).catch((error) => {
        logger.error('Error getting Google models:', error);
        return [];
      }),
      Promise.resolve(getBedrockModels()).catch((error) => {
        logger.error('Error getting Bedrock models:', error);
        return [];
      }),
    ]);
        }),
        getAnthropicModels({ user: req.user.id }).catch((error) => {
          logger.error('Error fetching Anthropic models:', error);
          return [];
        }),
        getOpenAIModels({ user: req.user.id, azure: true }).catch((error) => {
          logger.error('Error fetching Azure OpenAI models:', error);
          return [];
        }),
        getOpenAIModels({ assistants: true }).catch((error) => {
          logger.error('Error fetching OpenAI Assistants API models:', error);
          return [];
        }),
        getOpenAIModels({ azureAssistants: true }).catch((error) => {
          logger.error('Error fetching Azure OpenAI Assistants API models:', error);
          return [];
        }),
        Promise.resolve(getGoogleModels()).catch((error) => {
          logger.error('Error getting Google models:', error);
          return [];
        }),
        Promise.resolve(getBedrockModels()).catch((error) => {
          logger.error('Error getting Bedrock models:', error);
          return [];
        }),
      ]);

    return {
      [EModelEndpoint.openAI]: openAI,
      [EModelEndpoint.agents]: openAI,
      [EModelEndpoint.google]: google,
      [EModelEndpoint.anthropic]: anthropic,
      [EModelEndpoint.gptPlugins]: gptPlugins,
      [EModelEndpoint.azureOpenAI]: azureOpenAI,
      [EModelEndpoint.assistants]: assistants,
      [EModelEndpoint.azureAssistants]: azureAssistants,

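The per-fetcher .catch in the refactor above is what keeps one failing provider from rejecting the whole Promise.all; the pattern in miniature (values assumed for illustration):

const [a, b] = await Promise.all([
  Promise.reject(new Error('provider down')).catch(() => []), // degrades to []
  Promise.resolve(['model-1']),
]);
// a → [], b → ['model-1']; the aggregate never rejects
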
143 api/server/services/Config/mcpToolsCache.js Normal file
@@ -0,0 +1,143 @@
const { logger } = require('@librechat/data-schemas');
const { CacheKeys, Constants } = require('librechat-data-provider');
const { getCachedTools, setCachedTools } = require('./getCachedTools');
const { getLogStores } = require('~/cache');

/**
 * Updates MCP tools in the cache for a specific server and user
 * @param {Object} params - Parameters for updating MCP tools
 * @param {string} params.userId - User ID
 * @param {string} params.serverName - MCP server name
 * @param {Array} params.tools - Array of tool objects from MCP server
 * @returns {Promise<LCAvailableTools>}
 */
async function updateMCPUserTools({ userId, serverName, tools }) {
  try {
    const userTools = await getCachedTools({ userId });

    const mcpDelimiter = Constants.mcp_delimiter;
    for (const key of Object.keys(userTools)) {
      if (key.endsWith(`${mcpDelimiter}${serverName}`)) {
        delete userTools[key];
      }
    }

    for (const tool of tools) {
      const name = `${tool.name}${Constants.mcp_delimiter}${serverName}`;
      userTools[name] = {
        type: 'function',
        ['function']: {
          name,
          description: tool.description,
          parameters: tool.inputSchema,
        },
      };
    }

    await setCachedTools(userTools, { userId });

    const cache = getLogStores(CacheKeys.CONFIG_STORE);
    await cache.delete(CacheKeys.TOOLS);
    logger.debug(`[MCP Cache] Updated ${tools.length} tools for ${serverName} user ${userId}`);
    return userTools;
  } catch (error) {
    logger.error(`[MCP Cache] Failed to update tools for ${serverName}:`, error);
    throw error;
  }
}

/**
 * Merges app-level tools with global tools
 * @param {import('@librechat/api').LCAvailableTools} appTools
 * @returns {Promise<void>}
 */
async function mergeAppTools(appTools) {
  try {
    const count = Object.keys(appTools).length;
    if (!count) {
      return;
    }
    const cachedTools = await getCachedTools({ includeGlobal: true });
    const mergedTools = { ...cachedTools, ...appTools };
    await setCachedTools(mergedTools, { isGlobal: true });
    const cache = getLogStores(CacheKeys.CONFIG_STORE);
    await cache.delete(CacheKeys.TOOLS);
    logger.debug(`Merged ${count} app-level tools`);
  } catch (error) {
    logger.error('Failed to merge app-level tools:', error);
    throw error;
  }
}

/**
 * Merges user-level tools with global tools
 * @param {object} params
 * @param {string} params.userId
 * @param {Record<string, FunctionTool>} params.cachedUserTools
 * @param {import('@librechat/api').LCAvailableTools} params.userTools
 * @returns {Promise<void>}
 */
async function mergeUserTools({ userId, cachedUserTools, userTools }) {
  try {
    if (!userId) {
      return;
    }
    const count = Object.keys(userTools).length;
    if (!count) {
      return;
    }
    const cachedTools = cachedUserTools ?? (await getCachedTools({ userId }));
    const mergedTools = { ...cachedTools, ...userTools };
    await setCachedTools(mergedTools, { userId });
    const cache = getLogStores(CacheKeys.CONFIG_STORE);
    await cache.delete(CacheKeys.TOOLS);
    logger.debug(`Merged ${count} user-level tools`);
  } catch (error) {
    logger.error('Failed to merge user-level tools:', error);
    throw error;
  }
}

/**
 * Clears all MCP tools for a specific server
 * @param {Object} params - Parameters for clearing MCP tools
 * @param {string} [params.userId] - User ID (if clearing user-specific tools)
 * @param {string} params.serverName - MCP server name
 * @returns {Promise<void>}
 */
async function clearMCPServerTools({ userId, serverName }) {
  try {
    const tools = await getCachedTools({ userId, includeGlobal: !userId });

    // Remove all tools for this server
    const mcpDelimiter = Constants.mcp_delimiter;
    let removedCount = 0;
    for (const key of Object.keys(tools)) {
      if (key.endsWith(`${mcpDelimiter}${serverName}`)) {
        delete tools[key];
        removedCount++;
      }
    }

    if (removedCount > 0) {
      await setCachedTools(tools, userId ? { userId } : { isGlobal: true });

      const cache = getLogStores(CacheKeys.CONFIG_STORE);
      await cache.delete(CacheKeys.TOOLS);

      logger.debug(
        `[MCP Cache] Removed ${removedCount} tools for ${serverName}${userId ? ` user ${userId}` : ' (global)'}`,
      );
    }
  } catch (error) {
    logger.error(`[MCP Cache] Failed to clear tools for ${serverName}:`, error);
    throw error;
  }
}

module.exports = {
  mergeAppTools,
  mergeUserTools,
  updateMCPUserTools,
  clearMCPServerTools,
};
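A hedged usage sketch for the new cache helpers; the server name and the tool object shape are assumptions for illustration:

const tools = [
  { name: 'search', description: 'Web search', inputSchema: { type: 'object' } }, // assumed shape
];
await updateMCPUserTools({ userId: 'user-123', serverName: 'my-mcp', tools });
// Cached keys are namespaced as `${tool.name}${Constants.mcp_delimiter}my-mcp`;
// stale entries for the server are dropped first, then the set is re-added.
await clearMCPServerTools({ userId: 'user-123', serverName: 'my-mcp' });
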
@@ -30,7 +30,13 @@ const { getModelMaxTokens } = require('~/utils');
 * @param {TEndpointOption} [params.endpointOption]
 * @param {Set<string>} [params.allowedProviders]
 * @param {boolean} [params.isInitialAgent]
 * @returns {Promise<Agent & { tools: StructuredTool[], attachments: Array<MongoFile>, toolContextMap: Record<string, unknown>, maxContextTokens: number }>}
 * @returns {Promise<Agent & {
 *   tools: StructuredTool[],
 *   attachments: Array<MongoFile>,
 *   toolContextMap: Record<string, unknown>,
 *   maxContextTokens: number,
 *   userMCPAuthMap?: Record<string, Record<string, string>>
 * }>}
 */
const initializeAgent = async ({
  req,
@@ -91,16 +97,19 @@ const initializeAgent = async ({
  });

  const provider = agent.provider;
  const { tools: structuredTools, toolContextMap } =
    (await loadTools?.({
      req,
      res,
      provider,
      agentId: agent.id,
      tools: agent.tools,
      model: agent.model,
      tool_resources,
    })) ?? {};
  const {
    tools: structuredTools,
    toolContextMap,
    userMCPAuthMap,
  } = (await loadTools?.({
    req,
    res,
    provider,
    agentId: agent.id,
    tools: agent.tools,
    model: agent.model,
    tool_resources,
  })) ?? {};

  agent.endpoint = provider;
  const { getOptions, overrideProvider } = await getProviderConfig(provider);
@@ -189,6 +198,7 @@ const initializeAgent = async ({
    tools,
    attachments,
    resendFiles,
    userMCPAuthMap,
    toolContextMap,
    useLegacyContent: !!options.useLegacyContent,
    maxContextTokens: Math.round((agentMaxContextTokens - maxTokens) * 0.9),

@@ -1,6 +1,6 @@
const { logger } = require('@librechat/data-schemas');
const { isAgentsEndpoint, removeNullishValues, Constants } = require('librechat-data-provider');
const { loadAgent } = require('~/models/Agent');
const { logger } = require('~/config');

const buildOptions = (req, endpoint, parsedBody, endpointType) => {
  const { spec, iconURL, agent_id, instructions, ...model_parameters } = parsedBody;

@@ -1,4 +1,5 @@
const { logger } = require('@librechat/data-schemas');
const { validateAgentModel } = require('@librechat/api');
const { createContentAggregator } = require('@librechat/agents');
const {
  Constants,
@@ -11,12 +12,17 @@ const {
  getDefaultHandlers,
} = require('~/server/controllers/agents/callbacks');
const { initializeAgent } = require('~/server/services/Endpoints/agents/agent');
const { getModelsConfig } = require('~/server/controllers/ModelController');
const { getCustomEndpointConfig } = require('~/server/services/Config');
const { loadAgentTools } = require('~/server/services/ToolService');
const AgentClient = require('~/server/controllers/agents/client');
const { getAgent } = require('~/models/Agent');
const { logViolation } = require('~/cache');

function createToolLoader() {
/**
 * @param {AbortSignal} signal
 */
function createToolLoader(signal) {
  /**
   * @param {object} params
   * @param {ServerRequest} params.req
@@ -26,7 +32,11 @@ function createToolLoader() {
   * @param {string} params.provider
   * @param {string} params.model
   * @param {AgentToolResources} params.tool_resources
   * @returns {Promise<{ tools: StructuredTool[], toolContextMap: Record<string, unknown> } | undefined>}
   * @returns {Promise<{
   *   tools: StructuredTool[],
   *   toolContextMap: Record<string, unknown>,
   *   userMCPAuthMap?: Record<string, Record<string, string>>
   * } | undefined>}
   */
  return async function loadTools({ req, res, agentId, tools, provider, model, tool_resources }) {
    const agent = { id: agentId, tools, provider, model };
@@ -35,6 +45,7 @@ function createToolLoader() {
      req,
      res,
      agent,
      signal,
      tool_resources,
    });
  } catch (error) {
@@ -43,7 +54,7 @@ function createToolLoader() {
  };
}

const initializeClient = async ({ req, res, endpointOption }) => {
const initializeClient = async ({ req, res, signal, endpointOption }) => {
  if (!endpointOption) {
    throw new Error('Endpoint option not provided');
  }
@@ -72,11 +83,24 @@ const initializeClient = async ({ req, res, endpointOption }) => {
    throw new Error('Agent not found');
  }

  const modelsConfig = await getModelsConfig(req);
  const validationResult = await validateAgentModel({
    req,
    res,
    modelsConfig,
    logViolation,
    agent: primaryAgent,
  });

  if (!validationResult.isValid) {
    throw new Error(validationResult.error?.message);
  }

  const agentConfigs = new Map();
  /** @type {Set<string>} */
  const allowedProviders = new Set(req?.app?.locals?.[EModelEndpoint.agents]?.allowedProviders);

  const loadTools = createToolLoader();
  const loadTools = createToolLoader(signal);
  /** @type {Array<MongoFile>} */
  const requestFiles = req.body.files ?? [];
  /** @type {string} */
@@ -95,12 +119,26 @@ const initializeClient = async ({ req, res, endpointOption }) => {
  });

  const agent_ids = primaryConfig.agent_ids;
  let userMCPAuthMap = primaryConfig.userMCPAuthMap;
  if (agent_ids?.length) {
    for (const agentId of agent_ids) {
      const agent = await getAgent({ id: agentId });
      if (!agent) {
        throw new Error(`Agent ${agentId} not found`);
      }

      const validationResult = await validateAgentModel({
        req,
        res,
        agent,
        modelsConfig,
        logViolation,
      });

      if (!validationResult.isValid) {
        throw new Error(validationResult.error?.message);
      }

      const config = await initializeAgent({
        req,
        res,
@@ -111,6 +149,11 @@ const initializeClient = async ({ req, res, endpointOption }) => {
        endpointOption,
        allowedProviders,
      });
      if (userMCPAuthMap != null) {
        Object.assign(userMCPAuthMap, config.userMCPAuthMap ?? {});
      } else {
        userMCPAuthMap = config.userMCPAuthMap;
      }
      agentConfigs.set(agentId, config);
    }
  }
@@ -159,7 +202,7 @@ const initializeClient = async ({ req, res, endpointOption }) => {
      : EModelEndpoint.agents,
  });

  return { client };
  return { client, userMCPAuthMap };
};

module.exports = { initializeClient };

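A minimal sketch of the new call shape, assuming the caller owns an AbortController; the wiring shown is an assumption, not from this diff:

const controller = new AbortController();
const { client, userMCPAuthMap } = await initializeClient({
  req,
  res,
  signal: controller.signal, // forwarded into createToolLoader for tool loading
  endpointOption,
});
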
@@ -1,8 +1,8 @@
const { isEnabled } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { CacheKeys } = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores');
const { isEnabled } = require('~/server/utils');
const { saveConvo } = require('~/models');
const { logger } = require('~/config');

/**
 * Add title to conversation in a way that avoids memory retention

@@ -45,6 +45,10 @@ function getClaudeHeaders(model, supportsCacheControl) {
      'anthropic-beta':
        'token-efficient-tools-2025-02-19,output-128k-2025-02-19,prompt-caching-2024-07-31',
    };
  } else if (/claude-sonnet-4/.test(model)) {
    return {
      'anthropic-beta': 'prompt-caching-2024-07-31,context-1m-2025-08-07',
    };
  } else if (
    /claude-(?:sonnet|opus|haiku)-[4-9]/.test(model) ||
    /claude-[4-9]-(?:sonnet|opus|haiku)?/.test(model) ||

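A quick check of the new branch; the model IDs mirror those used in the tests later in this diff:

/claude-sonnet-4/.test('claude-sonnet-4-20250514'); // true → headers become
// { 'anthropic-beta': 'prompt-caching-2024-07-31,context-1m-2025-08-07' }
/claude-sonnet-4/.test('claude-3-5-sonnet-latest'); // false → falls through
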
@@ -39,8 +39,9 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio
  if (optionsOnly) {
    clientOptions = Object.assign(
      {
        reverseProxyUrl: ANTHROPIC_REVERSE_PROXY ?? null,
        proxy: PROXY ?? null,
        userId: req.user.id,
        reverseProxyUrl: ANTHROPIC_REVERSE_PROXY ?? null,
        modelOptions: endpointOption?.model_parameters ?? {},
      },
      clientOptions,

@@ -15,6 +15,7 @@ const { checkPromptCacheSupport, getClaudeHeaders, configureReasoning } = requir
 * @param {number} [options.modelOptions.topK] - Controls the number of top tokens to consider.
 * @param {string[]} [options.modelOptions.stop] - Sequences where the API will stop generating further tokens.
 * @param {boolean} [options.modelOptions.stream] - Whether to stream the response.
 * @param {string} options.userId - The user ID for tracking and personalization.
 * @param {string} [options.proxy] - Proxy server URL.
 * @param {string} [options.reverseProxyUrl] - URL for a reverse proxy, if used.
 *
@@ -47,6 +48,11 @@ function getLLMConfig(apiKey, options = {}) {
    maxTokens:
      mergedOptions.maxOutputTokens || anthropicSettings.maxOutputTokens.reset(mergedOptions.model),
    clientOptions: {},
    invocationKwargs: {
      metadata: {
        user_id: options.userId,
      },
    },
  };

  requestOptions = configureReasoning(requestOptions, systemOptions);

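A hedged sketch of the effect of the new invocationKwargs block; the API key and user ID are placeholders:

const { llmConfig } = getLLMConfig('test-key', {
  userId: 'user-123',
  modelOptions: { model: 'claude-3-5-sonnet-latest' },
});
// llmConfig.invocationKwargs.metadata.user_id === 'user-123'
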
@@ -1,50 +1,19 @@
const { anthropicSettings, removeNullishValues } = require('librechat-data-provider');
const { getLLMConfig } = require('~/server/services/Endpoints/anthropic/llm');
const { checkPromptCacheSupport, getClaudeHeaders, configureReasoning } = require('./helpers');

jest.mock('https-proxy-agent', () => ({
  HttpsProxyAgent: jest.fn().mockImplementation((proxy) => ({ proxy })),
}));

jest.mock('./helpers', () => ({
  checkPromptCacheSupport: jest.fn(),
  getClaudeHeaders: jest.fn(),
  configureReasoning: jest.fn((requestOptions) => requestOptions),
}));

jest.mock('librechat-data-provider', () => ({
  anthropicSettings: {
    model: { default: 'claude-3-opus-20240229' },
    maxOutputTokens: { default: 4096, reset: jest.fn(() => 4096) },
    thinking: { default: false },
    promptCache: { default: false },
    thinkingBudget: { default: null },
  },
  removeNullishValues: jest.fn((obj) => {
    const result = {};
    for (const key in obj) {
      if (obj[key] !== null && obj[key] !== undefined) {
        result[key] = obj[key];
      }
    }
    return result;
  }),
}));

describe('getLLMConfig', () => {
  beforeEach(() => {
    jest.clearAllMocks();
    checkPromptCacheSupport.mockReturnValue(false);
    getClaudeHeaders.mockReturnValue(undefined);
    configureReasoning.mockImplementation((requestOptions) => requestOptions);
    anthropicSettings.maxOutputTokens.reset.mockReturnValue(4096);
  });

  it('should create a basic configuration with default values', () => {
    const result = getLLMConfig('test-api-key', { modelOptions: {} });

    expect(result.llmConfig).toHaveProperty('apiKey', 'test-api-key');
    expect(result.llmConfig).toHaveProperty('model', anthropicSettings.model.default);
    expect(result.llmConfig).toHaveProperty('model', 'claude-3-5-sonnet-latest');
    expect(result.llmConfig).toHaveProperty('stream', true);
    expect(result.llmConfig).toHaveProperty('maxTokens');
  });
@@ -99,40 +68,73 @@ describe('getLLMConfig', () => {
    expect(result.llmConfig).toHaveProperty('topP', 0.9);
  });

  it('should NOT include topK and topP for Claude-3-7 models (hyphen notation)', () => {
    configureReasoning.mockImplementation((requestOptions) => {
      requestOptions.thinking = { type: 'enabled' };
      return requestOptions;
    });

  it('should NOT include topK and topP for Claude-3-7 models with thinking enabled (hyphen notation)', () => {
    const result = getLLMConfig('test-api-key', {
      modelOptions: {
        model: 'claude-3-7-sonnet',
        topK: 10,
        topP: 0.9,
        thinking: true,
      },
    });

    expect(result.llmConfig).not.toHaveProperty('topK');
    expect(result.llmConfig).not.toHaveProperty('topP');
    expect(result.llmConfig).toHaveProperty('thinking');
    expect(result.llmConfig.thinking).toHaveProperty('type', 'enabled');
    // When thinking is enabled, it uses the default thinkingBudget of 2000
    expect(result.llmConfig.thinking).toHaveProperty('budget_tokens', 2000);
  });

  it('should NOT include topK and topP for Claude-3.7 models (decimal notation)', () => {
    configureReasoning.mockImplementation((requestOptions) => {
      requestOptions.thinking = { type: 'enabled' };
      return requestOptions;
    });
  it('should add "prompt-caching" and "context-1m" beta headers for claude-sonnet-4 model', () => {
    const modelOptions = {
      model: 'claude-sonnet-4-20250514',
      promptCache: true,
    };
    const result = getLLMConfig('test-key', { modelOptions });
    const clientOptions = result.llmConfig.clientOptions;
    expect(clientOptions.defaultHeaders).toBeDefined();
    expect(clientOptions.defaultHeaders).toHaveProperty('anthropic-beta');
    expect(clientOptions.defaultHeaders['anthropic-beta']).toBe(
      'prompt-caching-2024-07-31,context-1m-2025-08-07',
    );
  });

  it('should add "prompt-caching" and "context-1m" beta headers for claude-sonnet-4 model formats', () => {
    const modelVariations = [
      'claude-sonnet-4-20250514',
      'claude-sonnet-4-latest',
      'anthropic/claude-sonnet-4-20250514',
    ];

    modelVariations.forEach((model) => {
      const modelOptions = { model, promptCache: true };
      const result = getLLMConfig('test-key', { modelOptions });
      const clientOptions = result.llmConfig.clientOptions;
      expect(clientOptions.defaultHeaders).toBeDefined();
      expect(clientOptions.defaultHeaders).toHaveProperty('anthropic-beta');
      expect(clientOptions.defaultHeaders['anthropic-beta']).toBe(
        'prompt-caching-2024-07-31,context-1m-2025-08-07',
      );
    });
  });

  it('should NOT include topK and topP for Claude-3.7 models with thinking enabled (decimal notation)', () => {
    const result = getLLMConfig('test-api-key', {
      modelOptions: {
        model: 'claude-3.7-sonnet',
        topK: 10,
        topP: 0.9,
        thinking: true,
      },
    });

    expect(result.llmConfig).not.toHaveProperty('topK');
    expect(result.llmConfig).not.toHaveProperty('topP');
    expect(result.llmConfig).toHaveProperty('thinking');
    expect(result.llmConfig.thinking).toHaveProperty('type', 'enabled');
    // When thinking is enabled, it uses the default thinkingBudget of 2000
    expect(result.llmConfig.thinking).toHaveProperty('budget_tokens', 2000);
  });

  it('should handle custom maxOutputTokens', () => {
@@ -233,7 +235,6 @@ describe('getLLMConfig', () => {
  });

  it('should handle maxOutputTokens when explicitly set to falsy value', () => {
    anthropicSettings.maxOutputTokens.reset.mockReturnValue(8192);
    const result = getLLMConfig('test-api-key', {
      modelOptions: {
        model: 'claude-3-opus',
@@ -241,8 +242,8 @@ describe('getLLMConfig', () => {
      },
    });

    expect(anthropicSettings.maxOutputTokens.reset).toHaveBeenCalledWith('claude-3-opus');
    expect(result.llmConfig).toHaveProperty('maxTokens', 8192);
    // The actual anthropicSettings.maxOutputTokens.reset('claude-3-opus') returns 4096
    expect(result.llmConfig).toHaveProperty('maxTokens', 4096);
  });

  it('should handle both proxy and reverseProxyUrl', () => {
@@ -263,9 +264,6 @@ describe('getLLMConfig', () => {
  });

  it('should handle prompt cache with supported model', () => {
    checkPromptCacheSupport.mockReturnValue(true);
    getClaudeHeaders.mockReturnValue({ 'anthropic-beta': 'prompt-caching-2024-07-31' });

    const result = getLLMConfig('test-api-key', {
      modelOptions: {
        model: 'claude-3-5-sonnet',
@@ -273,43 +271,38 @@ describe('getLLMConfig', () => {
      },
    });

    expect(checkPromptCacheSupport).toHaveBeenCalledWith('claude-3-5-sonnet');
    expect(getClaudeHeaders).toHaveBeenCalledWith('claude-3-5-sonnet', true);
    // claude-3-5-sonnet supports prompt caching and should get the appropriate headers
    expect(result.llmConfig.clientOptions.defaultHeaders).toEqual({
      'anthropic-beta': 'prompt-caching-2024-07-31',
      'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15,prompt-caching-2024-07-31',
    });
  });

  it('should handle thinking and thinkingBudget options', () => {
    configureReasoning.mockImplementation((requestOptions, systemOptions) => {
      if (systemOptions.thinking) {
        requestOptions.thinking = { type: 'enabled' };
      }
      if (systemOptions.thinkingBudget) {
        requestOptions.thinking = {
          ...requestOptions.thinking,
          budget_tokens: systemOptions.thinkingBudget,
        };
      }
      return requestOptions;
    });

    getLLMConfig('test-api-key', {
    const result = getLLMConfig('test-api-key', {
      modelOptions: {
        model: 'claude-3-7-sonnet',
        thinking: true,
        thinkingBudget: 5000,
        thinkingBudget: 10000, // This exceeds the default max_tokens of 8192
      },
    });

    expect(configureReasoning).toHaveBeenCalledWith(
      expect.any(Object),
      expect.objectContaining({
    // The function should add thinking configuration for claude-3-7 models
    expect(result.llmConfig).toHaveProperty('thinking');
    expect(result.llmConfig.thinking).toHaveProperty('type', 'enabled');
    // With claude-3-7-sonnet, the max_tokens default is 8192
    // Budget tokens gets adjusted to 90% of max_tokens (8192 * 0.9 = 7372) when it exceeds max_tokens
    expect(result.llmConfig.thinking).toHaveProperty('budget_tokens', 7372);

    // Test with budget_tokens within max_tokens limit
    const result2 = getLLMConfig('test-api-key', {
      modelOptions: {
        model: 'claude-3-7-sonnet',
        thinking: true,
        promptCache: false,
        thinkingBudget: 5000,
      }),
    );
        thinkingBudget: 2000,
      },
    });

    expect(result2.llmConfig.thinking).toHaveProperty('budget_tokens', 2000);
  });

  it('should remove system options from modelOptions', () => {
@@ -330,16 +323,6 @@ describe('getLLMConfig', () => {
  });

  it('should handle all nullish values removal', () => {
    removeNullishValues.mockImplementation((obj) => {
      const cleaned = {};
      Object.entries(obj).forEach(([key, value]) => {
        if (value !== null && value !== undefined) {
          cleaned[key] = value;
        }
      });
      return cleaned;
    });

    const result = getLLMConfig('test-api-key', {
      modelOptions: {
        temperature: null,

@@ -109,14 +109,14 @@ const initializeClient = async ({ req, res, version, endpointOption, initAppClie

    apiKey = azureOptions.azureOpenAIApiKey;
    opts.defaultQuery = { 'api-version': azureOptions.azureOpenAIApiVersion };
    opts.defaultHeaders = resolveHeaders(
      {
    opts.defaultHeaders = resolveHeaders({
      headers: {
        ...headers,
        'api-key': apiKey,
        'OpenAI-Beta': `assistants=${version}`,
      },
      req.user,
    );
      user: req.user,
    });
    opts.model = azureOptions.azureOpenAIApiDeploymentName;

    if (initAppClient) {

@@ -28,7 +28,11 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
  const CUSTOM_API_KEY = extractEnvVariable(endpointConfig.apiKey);
  const CUSTOM_BASE_URL = extractEnvVariable(endpointConfig.baseURL);

  let resolvedHeaders = resolveHeaders(endpointConfig.headers, req.user);
  let resolvedHeaders = resolveHeaders({
    headers: endpointConfig.headers,
    user: req.user,
    body: req.body,
  });

  if (CUSTOM_API_KEY.match(envVarRegex)) {
    throw new Error(`Missing API Key for ${endpoint}.`);

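A hedged sketch of the new object signature; the header map and body values are placeholders echoing the test below:

const resolved = resolveHeaders({
  headers: { 'x-user': '{{LIBRECHAT_USER_ID}}' },
  user: { id: 'user-123' },
  body: { endpoint: 'test-endpoint' }, // enables {{LIBRECHAT_BODY_*}} placeholders
});
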
@@ -64,13 +64,14 @@ describe('custom/initializeClient', () => {
    jest.clearAllMocks();
  });

  it('calls resolveHeaders with headers and user', async () => {
  it('calls resolveHeaders with headers, user, and body for body placeholder support', async () => {
    const { resolveHeaders } = require('@librechat/api');
    await initializeClient({ req: mockRequest, res: mockResponse, optionsOnly: true });
    expect(resolveHeaders).toHaveBeenCalledWith(
      { 'x-user': '{{LIBRECHAT_USER_ID}}', 'x-email': '{{LIBRECHAT_USER_EMAIL}}' },
      { id: 'user-123', email: 'test@example.com' },
    );
    expect(resolveHeaders).toHaveBeenCalledWith({
      headers: { 'x-user': '{{LIBRECHAT_USER_ID}}', 'x-email': '{{LIBRECHAT_USER_EMAIL}}' },
      user: { id: 'user-123', email: 'test@example.com' },
      body: { endpoint: 'test-endpoint' }, // body - supports {{LIBRECHAT_BODY_*}} placeholders
    });
  });

  it('throws if endpoint config is missing', async () => {

@@ -81,10 +81,10 @@ const initializeClient = async ({
    serverless = _serverless;

    clientOptions.reverseProxyUrl = baseURL ?? clientOptions.reverseProxyUrl;
    clientOptions.headers = resolveHeaders(
      { ...headers, ...(clientOptions.headers ?? {}) },
      req.user,
    );
    clientOptions.headers = resolveHeaders({
      headers: { ...headers, ...(clientOptions.headers ?? {}) },
      user: req.user,
    });

    clientOptions.titleConvo = azureConfig.titleConvo;
    clientOptions.titleModel = azureConfig.titleModel;

@@ -2,10 +2,10 @@ const fs = require('fs');
const path = require('path');
const axios = require('axios');
const fetch = require('node-fetch');
const { logger } = require('@librechat/data-schemas');
const { ref, uploadBytes, getDownloadURL, deleteObject } = require('firebase/storage');
const { getBufferMetadata } = require('~/server/utils');
const { getFirebaseStorage } = require('./initialize');
const { logger } = require('~/config');

/**
 * Deletes a file from Firebase Storage.
@@ -145,7 +145,10 @@ function extractFirebaseFilePath(urlString) {
  }

    return '';
  } catch (error) {
  } catch {
    logger.debug(
      '[extractFirebaseFilePath] Failed to extract Firebase file path from URL, returning empty string',
    );
    // If URL parsing fails, return an empty string
    return '';
  }

@@ -211,14 +214,24 @@ async function uploadFileToFirebase({ req, file, file_id }) {
  const inputBuffer = await fs.promises.readFile(inputFilePath);
  const bytes = Buffer.byteLength(inputBuffer);
  const userId = req.user.id;

  const fileName = `${file_id}__${path.basename(inputFilePath)}`;

  const downloadURL = await saveBufferToFirebase({ userId, buffer: inputBuffer, fileName });

  await fs.promises.unlink(inputFilePath);

  return { filepath: downloadURL, bytes };
  try {
    const downloadURL = await saveBufferToFirebase({ userId, buffer: inputBuffer, fileName });
    return { filepath: downloadURL, bytes };
  } catch (err) {
    logger.error('[uploadFileToFirebase] Error saving file buffer to Firebase:', err);
    try {
      if (file && file.path) {
        await fs.promises.unlink(file.path);
      }
    } catch (unlinkError) {
      logger.error(
        '[uploadFileToFirebase] Error deleting temporary file, likely already deleted:',
        unlinkError.message,
      );
    }
    throw err;
  }
}

/**

@@ -1,18 +1,29 @@
const { z } = require('zod');
const { tool } = require('@langchain/core/tools');
const { logger } = require('@librechat/data-schemas');
const { Time, CacheKeys, StepTypes } = require('librechat-data-provider');
const { Constants: AgentConstants, Providers, GraphEvents } = require('@librechat/agents');
const { Constants, ContentTypes, isAssistantsEndpoint } = require('librechat-data-provider');
const {
  Providers,
  StepTypes,
  GraphEvents,
  Constants: AgentConstants,
} = require('@librechat/agents');
const {
  sendEvent,
  MCPOAuthHandler,
  normalizeServerName,
  convertWithResolvedRefs,
} = require('@librechat/api');
const {
  Time,
  CacheKeys,
  Constants,
  ContentTypes,
  isAssistantsEndpoint,
} = require('librechat-data-provider');
const { getCachedTools, loadCustomConfig } = require('./Config');
const { findToken, createToken, updateToken } = require('~/models');
const { getMCPManager, getFlowStateManager } = require('~/config');
const { getCachedTools, loadCustomConfig } = require('./Config');
const { reinitMCPServer } = require('./Tools/mcp');
const { getLogStores } = require('~/cache');

/**
@@ -20,16 +31,13 @@ const { getLogStores } = require('~/cache');
 * @param {ServerResponse} params.res - The Express response object for sending events.
 * @param {string} params.stepId - The ID of the step in the flow.
 * @param {ToolCallChunk} params.toolCall - The tool call object containing tool information.
 * @param {string} params.loginFlowId - The ID of the login flow.
 * @param {FlowStateManager<any>} params.flowManager - The flow manager instance.
 */
function createOAuthStart({ res, stepId, toolCall, loginFlowId, flowManager, signal }) {
function createRunStepDeltaEmitter({ res, stepId, toolCall }) {
  /**
   * Creates a function to handle OAuth login requests.
   * @param {string} authURL - The URL to redirect the user for OAuth authentication.
   * @returns {Promise<boolean>} Returns true to indicate the event was sent successfully.
   * @returns {void}
   */
  return async function (authURL) {
  return function (authURL) {
    /** @type {{ id: string; delta: AgentToolCallDelta }} */
    const data = {
      id: stepId,
@@ -40,17 +48,54 @@ function createOAuthStart({ res, stepId, toolCall, loginFlowId, flowManager, sig
        expires_at: Date.now() + Time.TWO_MINUTES,
      },
    };
    /** Used to ensure the handler (use of `sendEvent`) is only invoked once */
    await flowManager.createFlowWithHandler(
      loginFlowId,
      'oauth_login',
      async () => {
        sendEvent(res, { event: GraphEvents.ON_RUN_STEP_DELTA, data });
        logger.debug('Sent OAuth login request to client');
        return true;
    sendEvent(res, { event: GraphEvents.ON_RUN_STEP_DELTA, data });
  };
}

/**
 * @param {object} params
 * @param {ServerResponse} params.res - The Express response object for sending events.
 * @param {string} params.runId - The Run ID, i.e. message ID
 * @param {string} params.stepId - The ID of the step in the flow.
 * @param {ToolCallChunk} params.toolCall - The tool call object containing tool information.
 * @param {number} [params.index]
 */
function createRunStepEmitter({ res, runId, stepId, toolCall, index }) {
  return function () {
    /** @type {import('@librechat/agents').RunStep} */
    const data = {
      runId: runId ?? Constants.USE_PRELIM_RESPONSE_MESSAGE_ID,
      id: stepId,
      type: StepTypes.TOOL_CALLS,
      index: index ?? 0,
      stepDetails: {
        type: StepTypes.TOOL_CALLS,
        tool_calls: [toolCall],
      },
      signal,
    );
    };
    sendEvent(res, { event: GraphEvents.ON_RUN_STEP, data });
  };
}

/**
 * Creates a function used to ensure the flow handler is only invoked once
 * @param {object} params
 * @param {string} params.flowId - The ID of the login flow.
 * @param {FlowStateManager<any>} params.flowManager - The flow manager instance.
 * @param {(authURL: string) => void} [params.callback]
 */
function createOAuthStart({ flowId, flowManager, callback }) {
  /**
   * Creates a function to handle OAuth login requests.
   * @param {string} authURL - The URL to redirect the user for OAuth authentication.
   * @returns {Promise<boolean>} Returns true to indicate the event was sent successfully.
   */
  return async function (authURL) {
    await flowManager.createFlowWithHandler(flowId, 'oauth_login', async () => {
      callback?.(authURL);
      logger.debug('Sent OAuth login request to client');
      return true;
    });
  };
}

@@ -93,23 +138,166 @@ function createAbortHandler({ userId, serverName, toolName, flowManager }) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a general tool for an entire action set.
|
||||
* @param {Object} params
|
||||
* @param {() => void} params.runStepEmitter
|
||||
* @param {(authURL: string) => void} params.runStepDeltaEmitter
|
||||
* @returns {(authURL: string) => void}
|
||||
*/
|
||||
function createOAuthCallback({ runStepEmitter, runStepDeltaEmitter }) {
|
||||
return function (authURL) {
|
||||
runStepEmitter();
|
||||
runStepDeltaEmitter(authURL);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Object} params
|
||||
* @param {ServerRequest} params.req - The Express request object, containing user/request info.
|
||||
* @param {ServerResponse} params.res - The Express response object for sending events.
|
||||
* @param {string} params.serverName
|
||||
* @param {AbortSignal} params.signal
|
||||
* @param {string} params.model
|
||||
* @param {number} [params.index]
|
||||
* @param {Record<string, Record<string, string>>} [params.userMCPAuthMap]
|
||||
* @returns { Promise<Array<typeof tool | { _call: (toolInput: Object | string) => unknown}>> } An object with `_call` method to execute the tool input.
|
||||
*/
|
||||
async function reconnectServer({ req, res, index, signal, serverName, userMCPAuthMap }) {
|
||||
const runId = Constants.USE_PRELIM_RESPONSE_MESSAGE_ID;
|
||||
const flowId = `${req.user?.id}:${serverName}:${Date.now()}`;
|
||||
const flowManager = getFlowStateManager(getLogStores(CacheKeys.FLOWS));
|
||||
const stepId = 'step_oauth_login_' + serverName;
|
||||
const toolCall = {
|
||||
id: flowId,
|
||||
name: serverName,
|
||||
type: 'tool_call_chunk',
|
||||
};
|
||||
|
||||
const runStepEmitter = createRunStepEmitter({
|
||||
res,
|
||||
index,
|
||||
runId,
|
||||
stepId,
|
||||
toolCall,
|
||||
});
|
||||
const runStepDeltaEmitter = createRunStepDeltaEmitter({
|
||||
res,
|
||||
stepId,
|
||||
toolCall,
|
||||
});
|
||||
const callback = createOAuthCallback({ runStepEmitter, runStepDeltaEmitter });
|
||||
const oauthStart = createOAuthStart({
|
||||
res,
|
||||
flowId,
|
||||
callback,
|
||||
flowManager,
|
||||
});
|
||||
return await reinitMCPServer({
|
||||
req,
|
||||
signal,
|
||||
serverName,
|
||||
oauthStart,
|
||||
flowManager,
|
||||
userMCPAuthMap,
|
||||
forceNew: true,
|
||||
returnOnOAuth: false,
|
||||
connectionTimeout: Time.TWO_MINUTES,
|
||||
});
|
||||
}
|
||||
|
||||
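The `reconnectServer` helper above composes the run-step emitters into a single OAuth callback and forces a fresh connection. A hedged sketch of how a route might drive it (the route path, response shape, and wiring are illustrative, not from the diff):

// Hypothetical Express wiring; only reconnectServer comes from the hunk above.
const router = require('express').Router();
router.post('/mcp/:serverName/reconnect', async (req, res) => {
  const controller = new AbortController();
  req.on('close', () => controller.abort());
  const result = await reconnectServer({
    req,
    res,
    index: 0,
    signal: controller.signal,
    serverName: req.params.serverName,
  });
  // If OAuth was required, the run-step events were already streamed to the client.
  res.json({ success: Boolean(result?.tools) });
});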
/**
* Creates all tools from the specified MCP Server via `toolKey`.
*
* @param {Object} params - The parameters for loading action sets.
* This function assumes tools could not be aggregated from the cache of tool definitions,
* i.e. `availableTools`, and will reinitialize the MCP server to ensure all tools are generated.
*
* @param {Object} params
* @param {ServerRequest} params.req - The Express request object, containing user/request info.
* @param {ServerResponse} params.res - The Express response object for sending events.
* @param {string} params.serverName
* @param {string} params.model
* @param {Providers | EModelEndpoint} params.provider - The provider for the tool.
* @param {number} [params.index]
* @param {AbortSignal} [params.signal]
* @param {Record<string, Record<string, string>>} [params.userMCPAuthMap]
* @returns { Promise<Array<typeof tool | { _call: (toolInput: Object | string) => unknown}>> } An object with `_call` method to execute the tool input.
*/
async function createMCPTools({ req, res, index, signal, serverName, provider, userMCPAuthMap }) {
const result = await reconnectServer({ req, res, index, signal, serverName, userMCPAuthMap });
if (!result || !result.tools) {
logger.warn(`[MCP][${serverName}] Failed to reinitialize MCP server.`);
return;
}

const serverTools = [];
for (const tool of result.tools) {
const toolInstance = await createMCPTool({
req,
res,
provider,
userMCPAuthMap,
availableTools: result.availableTools,
toolKey: `${tool.name}${Constants.mcp_delimiter}${serverName}`,
});
if (toolInstance) {
serverTools.push(toolInstance);
}
}

return serverTools;
}
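`createMCPTools` rebuilds every tool for one server after a forced reconnect, returning undefined on failure and an array of tool instances otherwise. A hedged usage sketch (the server name and provider value are illustrative):

// Hypothetical call site; createMCPTools is defined in the hunk above.
const controller = new AbortController();
const tools = await createMCPTools({
  req,
  res,
  serverName: 'my-mcp-server', // illustrative name
  provider: Providers.OPENAI,  // assumes this enum member exists in @librechat/agents
  signal: controller.signal,
});
// undefined means reinitialization failed; otherwise an array of tool instances.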
/**
* Creates a single tool from the specified MCP Server via `toolKey`.
* @param {Object} params
* @param {ServerRequest} params.req - The Express request object, containing user/request info.
* @param {ServerResponse} params.res - The Express response object for sending events.
* @param {string} params.toolKey - The toolKey for the tool.
* @param {import('@librechat/agents').Providers | EModelEndpoint} params.provider - The provider for the tool.
* @param {string} params.model - The model for the tool.
* @param {number} [params.index]
* @param {AbortSignal} [params.signal]
* @param {Providers | EModelEndpoint} params.provider - The provider for the tool.
* @param {LCAvailableTools} [params.availableTools]
* @param {Record<string, Record<string, string>>} [params.userMCPAuthMap]
* @returns { Promise<typeof tool | { _call: (toolInput: Object | string) => unknown}> } An object with `_call` method to execute the tool input.
*/
async function createMCPTool({ req, res, toolKey, provider: _provider }) {
const availableTools = await getCachedTools({ userId: req.user?.id, includeGlobal: true });
const toolDefinition = availableTools?.[toolKey]?.function;
async function createMCPTool({
req,
res,
index,
signal,
toolKey,
provider,
userMCPAuthMap,
availableTools: tools,
}) {
const [toolName, serverName] = toolKey.split(Constants.mcp_delimiter);
const availableTools =
tools ?? (await getCachedTools({ userId: req.user?.id, includeGlobal: true }));
/** @type {LCTool | undefined} */
let toolDefinition = availableTools?.[toolKey]?.function;
if (!toolDefinition) {
logger.error(`Tool ${toolKey} not found in available tools`);
return null;
logger.warn(
`[MCP][${serverName}][${toolName}] Requested tool not found in available tools, re-initializing MCP server.`,
);
const result = await reconnectServer({ req, res, index, signal, serverName, userMCPAuthMap });
toolDefinition = result?.availableTools?.[toolKey]?.function;
}

if (!toolDefinition) {
logger.warn(`[MCP][${serverName}][${toolName}] Tool definition not found, cannot create tool.`);
return;
}

return createToolInstance({
res,
provider,
toolName,
serverName,
toolDefinition,
});
}

function createToolInstance({ res, toolName, serverName, toolDefinition, provider: _provider }) {
/** @type {LCTool} */
const { description, parameters } = toolDefinition;
const isGoogle = _provider === Providers.VERTEXAI || _provider === Providers.GOOGLE;
@@ -122,16 +310,8 @@ async function createMCPTool({ req, res, toolKey, provider: _provider }) {
schema = z.object({ input: z.string().optional() });
}

const [toolName, serverName] = toolKey.split(Constants.mcp_delimiter);
const normalizedToolKey = `${toolName}${Constants.mcp_delimiter}${normalizeServerName(serverName)}`;

if (!req.user?.id) {
logger.error(
`[MCP][${serverName}][${toolName}] User ID not found on request. Cannot create tool.`,
);
throw new Error(`User ID not found on request. Cannot create tool for ${toolKey}.`);
}

/** @type {(toolArguments: Object | string, config?: GraphRunnableConfig) => Promise<unknown>} */
const _call = async (toolArguments, config) => {
const userId = config?.configurable?.user?.id || config?.configurable?.user_id;
@@ -148,14 +328,16 @@ async function createMCPTool({ req, res, toolKey, provider: _provider }) {
const provider = (config?.metadata?.provider || _provider)?.toLowerCase();

const { args: _args, stepId, ...toolCall } = config.toolCall ?? {};
const loginFlowId = `${serverName}:oauth_login:${config.metadata.thread_id}:${config.metadata.run_id}`;
const oauthStart = createOAuthStart({
const flowId = `${serverName}:oauth_login:${config.metadata.thread_id}:${config.metadata.run_id}`;
const runStepDeltaEmitter = createRunStepDeltaEmitter({
res,
stepId,
toolCall,
loginFlowId,
});
const oauthStart = createOAuthStart({
flowId,
flowManager,
signal: derivedSignal,
callback: runStepDeltaEmitter,
});
const oauthEnd = createOAuthEnd({
res,
@@ -180,6 +362,7 @@ async function createMCPTool({ req, res, toolKey, provider: _provider }) {
signal: derivedSignal,
},
user: config?.configurable?.user,
requestBody: config?.configurable?.requestBody,
customUserVars,
flowManager,
tokenMethods: {
@@ -200,7 +383,7 @@ async function createMCPTool({ req, res, toolKey, provider: _provider }) {
return result;
} catch (error) {
logger.error(
`[MCP][User: ${userId}][${serverName}] Error calling "${toolName}" MCP tool:`,
`[MCP][${serverName}][${toolName}][User: ${userId}] Error calling MCP tool:`,
error,
);

@@ -213,12 +396,12 @@ async function createMCPTool({ req, res, toolKey, provider: _provider }) {

if (isOAuthError) {
throw new Error(
`OAuth authentication required for ${serverName}. Please check the server logs for the authentication URL.`,
`[MCP][${serverName}][${toolName}] OAuth authentication required. Please check the server logs for the authentication URL.`,
);
}

throw new Error(
`"${toolKey}" tool call failed${error?.message ? `: ${error?.message}` : '.'}`,
`[MCP][${serverName}][${toolName}] tool call failed${error?.message ? `: ${error?.message}` : '.'}`,
);
} finally {
// Clean up abort handler to prevent memory leaks
@@ -254,15 +437,21 @@ async function getMCPSetupData(userId) {
}

const mcpManager = getMCPManager(userId);
const appConnections = mcpManager.getAllConnections() || new Map();
/** @type {ReturnType<MCPManager['getAllConnections']>} */
let appConnections = new Map();
try {
appConnections = (await mcpManager.getAllConnections()) || new Map();
} catch (error) {
logger.error(`[MCP][User: ${userId}] Error getting app connections:`, error);
}
const userConnections = mcpManager.getUserConnections(userId) || new Map();
const oauthServers = mcpManager.getOAuthServers() || new Set();

return {
mcpConfig,
oauthServers,
appConnections,
userConnections,
oauthServers,
};
}
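`getMCPSetupData` now awaits `getAllConnections` and guards it with a try/catch, so a failing manager no longer breaks setup. A small consumer sketch (the summarizing function is illustrative, not from the diff):

// Hypothetical consumer; getMCPSetupData is the function changed above.
async function getMCPStatusSummary(userId) {
  const { mcpConfig, appConnections, userConnections, oauthServers } =
    await getMCPSetupData(userId);
  return {
    configuredServers: Object.keys(mcpConfig ?? {}).length,
    appConnections: appConnections.size,   // still a Map even if fetching failed
    userConnections: userConnections.size,
    oauthServers: [...oauthServers],
  };
}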
@@ -367,6 +556,7 @@ async function getServerConnectionStatus(

module.exports = {
createMCPTool,
createMCPTools,
getMCPSetupData,
checkOAuthFlowStatus,
getServerConnectionStatus,

@@ -34,6 +34,7 @@ const { openAIApiKey, userProvidedOpenAI } = require('./Config/EndpointService')
* @param {string} params.apiKey - The API key for authentication with the API.
* @param {string} params.baseURL - The base path URL for the API.
* @param {string} [params.name='OpenAI'] - The name of the API; defaults to 'OpenAI'.
* @param {boolean} [params.direct=false] - Whether `directEndpoint` was configured
* @param {boolean} [params.azure=false] - Whether to fetch models from Azure.
* @param {boolean} [params.userIdQuery=false] - Whether to send the user ID as a query parameter.
* @param {boolean} [params.createTokenConfig=true] - Whether to create a token configuration from the API response.
@@ -44,14 +45,16 @@ const { openAIApiKey, userProvidedOpenAI } = require('./Config/EndpointService')
const fetchModels = async ({
user,
apiKey,
baseURL,
baseURL: _baseURL,
name = EModelEndpoint.openAI,
direct,
azure = false,
userIdQuery = false,
createTokenConfig = true,
tokenKey,
}) => {
let models = [];
const baseURL = direct ? extractBaseURL(_baseURL) : _baseURL;

if (!baseURL && !azure) {
return models;
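The new `direct` flag normalizes the configured URL through `extractBaseURL` before the models request is made. A hedged illustration of the intent; the exact normalization rules live in `extractBaseURL` and the URL below is only an example:

// Illustrative only: actual behavior depends on extractBaseURL's implementation.
const raw = 'https://example.com/v1/chat/completions'; // hypothetical directEndpoint URL
const base = extractBaseURL(raw); // expected to reduce to the API base, e.g. 'https://example.com/v1'
const models = await fetchModels({ apiKey, baseURL: raw, direct: true, name: 'Example' });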
@@ -33,7 +33,7 @@ async function withTimeout(promise, timeoutMs, timeoutMessage) {
* @param {string} [params.body.model] - Optional. The ID of the model to be used for this run.
* @param {string} [params.body.instructions] - Optional. Override the default system message of the assistant.
* @param {string} [params.body.additional_instructions] - Optional. Appends additional instructions
* at theend of the instructions for the run. This is useful for modifying
* at the end of the instructions for the run. This is useful for modifying
* the behavior on a per-run basis without overriding other instructions.
* @param {Object[]} [params.body.tools] - Optional. Override the tools the assistant can use for this run.
* @param {string[]} [params.body.file_ids] - Optional.
@@ -3,6 +3,7 @@ const path = require('path');
const { sleep } = require('@librechat/agents');
const { logger } = require('@librechat/data-schemas');
const { zodToJsonSchema } = require('zod-to-json-schema');
const { getToolkitKey, getUserMCPAuthMap } = require('@librechat/api');
const { Calculator } = require('@langchain/community/tools/calculator');
const { tool: toolFn, Tool, DynamicStructuredTool } = require('@langchain/core/tools');
const {
@@ -11,7 +12,6 @@ const {
ErrorTypes,
ContentTypes,
imageGenTools,
EToolResources,
EModelEndpoint,
actionDelimiter,
ImageVisionTool,
@@ -33,36 +33,17 @@ const {
toolkits,
} = require('~/app/clients/tools');
const { processFileURL, uploadImageBuffer } = require('~/server/services/Files/process');
const { getEndpointsConfig, getCachedTools } = require('~/server/services/Config');
const {
getEndpointsConfig,
hasCustomUserVars,
getCachedTools,
} = require('~/server/services/Config');
const { createOnSearchResults } = require('~/server/services/Tools/search');
const { isActionDomainAllowed } = require('~/server/services/domains');
const { recordUsage } = require('~/server/services/Threads');
const { loadTools } = require('~/app/clients/tools/util');
const { redactMessage } = require('~/config/parsers');

/**
* @param {string} toolName
* @returns {string | undefined} toolKey
*/
function getToolkitKey(toolName) {
/** @type {string|undefined} */
let toolkitKey;
for (const toolkit of toolkits) {
if (toolName.startsWith(EToolResources.image_edit)) {
const splitMatches = toolkit.pluginKey.split('_');
const suffix = splitMatches[splitMatches.length - 1];
if (toolName.endsWith(suffix)) {
toolkitKey = toolkit.pluginKey;
break;
}
}
if (toolName.startsWith(toolkit.pluginKey)) {
toolkitKey = toolkit.pluginKey;
break;
}
}
return toolkitKey;
}
const { findPluginAuthsByKeys } = require('~/models');

/**
* Loads and formats tools from the specified tool directory.
@@ -145,7 +126,7 @@ function loadAndFormatTools({ directory, adminFilter = [], adminIncluded = [] })
for (const toolInstance of basicToolInstances) {
const formattedTool = formatToOpenAIAssistantTool(toolInstance);
let toolName = formattedTool[Tools.function].name;
toolName = getToolkitKey(toolName) ?? toolName;
toolName = getToolkitKey({ toolkits, toolName }) ?? toolName;
if (filter.has(toolName) && included.size === 0) {
continue;
}
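The module-local `getToolkitKey(toolName)` is deleted in favor of the shared helper from `@librechat/api`, which takes the toolkit list explicitly instead of closing over it. The call-site change above boils down to:

// Before: module-local helper closed over `toolkits`.
toolName = getToolkitKey(toolName) ?? toolName;
// After: shared helper from @librechat/api receives `toolkits` as an argument.
toolName = getToolkitKey({ toolkits, toolName }) ?? toolName;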
@@ -493,11 +474,12 @@ async function processRequiredActions(client, requiredActions) {
* @param {Object} params - Run params containing user and request information.
* @param {ServerRequest} params.req - The request object.
* @param {ServerResponse} params.res - The response object.
* @param {AbortSignal} params.signal
* @param {Pick<Agent, 'id' | 'provider' | 'model' | 'tools'>} params.agent - The agent to load tools for.
* @param {string | undefined} [params.openAIApiKey] - The OpenAI API key.
* @returns {Promise<{ tools?: StructuredTool[] }>} The agent tools.
* @returns {Promise<{ tools?: StructuredTool[]; userMCPAuthMap?: Record<string, Record<string, string>> }>} The agent tools.
*/
async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey }) {
async function loadAgentTools({ req, res, agent, signal, tool_resources, openAIApiKey }) {
if (!agent.tools || agent.tools.length === 0) {
return {};
} else if (agent.tools && agent.tools.length === 1 && agent.tools[0] === AgentCapabilities.ocr) {
@@ -547,8 +529,20 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
webSearchCallbacks = createOnSearchResults(res);
}

/** @type {Record<string, Record<string, string>>} */
let userMCPAuthMap;
if (await hasCustomUserVars()) {
userMCPAuthMap = await getUserMCPAuthMap({
tools: agent.tools,
userId: req.user.id,
findPluginAuthsByKeys,
});
}

const { loadedTools, toolContextMap } = await loadTools({
agent,
signal,
userMCPAuthMap,
functions: true,
user: req.user.id,
tools: _agentTools,
@@ -612,6 +606,7 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
if (!checkCapability(AgentCapabilities.actions)) {
return {
tools: agentTools,
userMCPAuthMap,
toolContextMap,
};
}
@@ -623,6 +618,7 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
}
return {
tools: agentTools,
userMCPAuthMap,
toolContextMap,
};
}
@@ -731,6 +727,7 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
return {
tools: agentTools,
toolContextMap,
userMCPAuthMap,
};
}

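`loadAgentTools` now threads `userMCPAuthMap` through to `loadTools` and returns it alongside the tools on every exit path. A hedged caller sketch; the destructuring shape follows the updated `@returns` annotation, and the surrounding variables are assumed in scope:

// Hypothetical caller; loadAgentTools is the function changed above.
const { tools, toolContextMap, userMCPAuthMap } = await loadAgentTools({
  req,
  res,
  agent,
  signal: controller.signal, // controller assumed in scope
  openAIApiKey,
});
// userMCPAuthMap carries per-server custom user variables for MCP tool calls.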
142
api/server/services/Tools/mcp.js
Normal file
@@ -0,0 +1,142 @@
const { logger } = require('@librechat/data-schemas');
const { CacheKeys, Constants } = require('librechat-data-provider');
const { findToken, createToken, updateToken, deleteTokens } = require('~/models');
const { getMCPManager, getFlowStateManager } = require('~/config');
const { updateMCPUserTools } = require('~/server/services/Config');
const { getLogStores } = require('~/cache');

/**
* @param {Object} params
* @param {ServerRequest} params.req
* @param {string} params.serverName - The name of the MCP server
* @param {boolean} params.returnOnOAuth - Whether to initiate OAuth and return, or wait for OAuth flow to finish
* @param {AbortSignal} [params.signal] - The abort signal to handle cancellation.
* @param {boolean} [params.forceNew]
* @param {number} [params.connectionTimeout]
* @param {FlowStateManager<any>} [params.flowManager]
* @param {(authURL: string) => Promise<boolean>} [params.oauthStart]
* @param {Record<string, Record<string, string>>} [params.userMCPAuthMap]
*/
async function reinitMCPServer({
req,
signal,
forceNew,
serverName,
userMCPAuthMap,
connectionTimeout,
returnOnOAuth = true,
oauthStart: _oauthStart,
flowManager: _flowManager,
}) {
/** @type {MCPConnection | null} */
let userConnection = null;
/** @type {LCAvailableTools | null} */
let availableTools = null;
/** @type {ReturnType<MCPConnection['fetchTools']> | null} */
let tools = null;
let oauthRequired = false;
let oauthUrl = null;
try {
const customUserVars = userMCPAuthMap?.[`${Constants.mcp_prefix}${serverName}`];
const flowManager = _flowManager ?? getFlowStateManager(getLogStores(CacheKeys.FLOWS));
const mcpManager = getMCPManager();

const oauthStart =
_oauthStart ??
(async (authURL) => {
logger.info(`[MCP Reinitialize] OAuth URL received: ${authURL}`);
oauthUrl = authURL;
oauthRequired = true;
});

try {
userConnection = await mcpManager.getUserConnection({
user: req.user,
signal,
forceNew,
oauthStart,
serverName,
flowManager,
returnOnOAuth,
customUserVars,
connectionTimeout,
tokenMethods: {
findToken,
updateToken,
createToken,
deleteTokens,
},
});

logger.info(`[MCP Reinitialize] Successfully established connection for ${serverName}`);
} catch (err) {
logger.info(`[MCP Reinitialize] getUserConnection threw error: ${err.message}`);
logger.info(
`[MCP Reinitialize] OAuth state - oauthRequired: ${oauthRequired}, oauthUrl: ${oauthUrl ? 'present' : 'null'}`,
);

const isOAuthError =
err.message?.includes('OAuth') ||
err.message?.includes('authentication') ||
err.message?.includes('401');

const isOAuthFlowInitiated = err.message === 'OAuth flow initiated - return early';

if (isOAuthError || oauthRequired || isOAuthFlowInitiated) {
logger.info(
`[MCP Reinitialize] OAuth required for ${serverName} (isOAuthError: ${isOAuthError}, oauthRequired: ${oauthRequired}, isOAuthFlowInitiated: ${isOAuthFlowInitiated})`,
);
oauthRequired = true;
} else {
logger.error(
`[MCP Reinitialize] Error initializing MCP server ${serverName} for user:`,
err,
);
}
}

if (userConnection && !oauthRequired) {
tools = await userConnection.fetchTools();
availableTools = await updateMCPUserTools({
userId: req.user.id,
serverName,
tools,
});
}

logger.debug(
`[MCP Reinitialize] Sending response for ${serverName} - oauthRequired: ${oauthRequired}, oauthUrl: ${oauthUrl ? 'present' : 'null'}`,
);

const getResponseMessage = () => {
if (oauthRequired) {
return `MCP server '${serverName}' ready for OAuth authentication`;
}
if (userConnection) {
return `MCP server '${serverName}' reinitialized successfully`;
}
return `Failed to reinitialize MCP server '${serverName}'`;
};

const result = {
availableTools,
success: Boolean((userConnection && !oauthRequired) || (oauthRequired && oauthUrl)),
message: getResponseMessage(),
oauthRequired,
serverName,
oauthUrl,
tools,
};
logger.debug(`[MCP Reinitialize] Response for ${serverName}:`, result);
return result;
} catch (error) {
logger.error(
'[MCP Reinitialize] Error loading MCP Tools, servers may still be initializing:',
error,
);
}
}

module.exports = {
reinitMCPServer,
};
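`reinitMCPServer` reports both connection success and pending OAuth in one result object. A minimal sketch of interpreting it; the server name here is illustrative, and the field names come from the `result` object above:

// Hypothetical consumer of reinitMCPServer's result (returnOnOAuth defaults to true).
const result = await reinitMCPServer({ req, serverName: 'github' }); // 'github' is illustrative
if (result?.oauthRequired && result.oauthUrl) {
  // Surface the OAuth URL to the user; the connection completes after the flow.
  console.log(`Authenticate at: ${result.oauthUrl}`);
} else if (result?.success) {
  console.log(`${result.serverName} reconnected with ${result.tools?.length ?? 0} tools`);
}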
@@ -1,9 +1,6 @@
const { logger } = require('@librechat/data-schemas');
const { CacheKeys } = require('librechat-data-provider');
const { findToken, updateToken, createToken, deleteTokens } = require('~/models');
const { getMCPManager, getFlowStateManager } = require('~/config');
const { getCachedTools, setCachedTools } = require('./Config');
const { getLogStores } = require('~/cache');
const { createMCPManager } = require('~/config');
const { mergeAppTools } = require('./Config');

/**
* Initialize MCP servers
@@ -15,55 +12,16 @@ async function initializeMCPs(app) {
return;
}

// Filter out servers with startup: false
const filteredServers = {};
for (const [name, config] of Object.entries(mcpServers)) {
if (config.startup === false) {
logger.info(`Skipping MCP server '${name}' due to startup: false`);
continue;
}
filteredServers[name] = config;
}

if (Object.keys(filteredServers).length === 0) {
logger.info('[MCP] No MCP servers to initialize (all skipped or none configured)');
return;
}

logger.info('Initializing MCP servers...');
const mcpManager = getMCPManager();
const flowsCache = getLogStores(CacheKeys.FLOWS);
const flowManager = flowsCache ? getFlowStateManager(flowsCache) : null;
const mcpManager = await createMCPManager(mcpServers);

try {
await mcpManager.initializeMCPs({
mcpServers: filteredServers,
flowManager,
tokenMethods: {
findToken,
updateToken,
createToken,
deleteTokens,
},
});

delete app.locals.mcpConfig;
const availableTools = await getCachedTools();
const mcpTools = mcpManager.getAppToolFunctions() || {};
await mergeAppTools(mcpTools);

if (!availableTools) {
logger.warn('No available tools found in cache during MCP initialization');
return;
}

const toolsCopy = { ...availableTools };
await mcpManager.mapAvailableTools(toolsCopy, flowManager);
await setCachedTools(toolsCopy, { isGlobal: true });

const cache = getLogStores(CacheKeys.CONFIG_STORE);
await cache.delete(CacheKeys.TOOLS);
logger.debug('Cleared tools array cache after MCP initialization');

logger.info('MCP servers initialized successfully');
logger.info(
`MCP servers initialized successfully. Added ${Object.keys(mcpTools).length} MCP tools.`,
);
} catch (error) {
logger.error('Failed to initialize MCP servers:', error);
}

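The startup path is condensed: one `createMCPManager(mcpServers)` call replaces the manual `startup: false` filtering plus `initializeMCPs`, and app-level tool functions are merged into the cache via `mergeAppTools` instead of the `getCachedTools`/`setCachedTools` round-trip. A condensed restatement of the new flow (this assumes the filtering now happens inside the manager, which the diff implies but does not show):

// Condensed view of the new flow shown above; mcpServers comes from app.locals.
const mcpManager = await createMCPManager(mcpServers); // presumed to handle startup filtering internally
const mcpTools = mcpManager.getAppToolFunctions() || {};
await mergeAppTools(mcpTools); // replaces the cached-tools copy/map/set sequence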
@@ -1,21 +1,55 @@
const {
SystemRoles,
Permissions,
roleDefaults,
PermissionTypes,
isMemoryEnabled,
removeNullishValues,
} = require('librechat-data-provider');
const { updateAccessPermissions } = require('~/models/Role');
const { logger } = require('~/config');
const { logger } = require('@librechat/data-schemas');
const { isMemoryEnabled } = require('@librechat/api');
const { updateAccessPermissions, getRoleByName } = require('~/models/Role');

/**
* Checks if a permission type has explicit configuration
*/
function hasExplicitConfig(interfaceConfig, permissionType) {
switch (permissionType) {
case PermissionTypes.PROMPTS:
return interfaceConfig.prompts !== undefined;
case PermissionTypes.BOOKMARKS:
return interfaceConfig.bookmarks !== undefined;
case PermissionTypes.MEMORIES:
return interfaceConfig.memories !== undefined;
case PermissionTypes.MULTI_CONVO:
return interfaceConfig.multiConvo !== undefined;
case PermissionTypes.AGENTS:
return interfaceConfig.agents !== undefined;
case PermissionTypes.TEMPORARY_CHAT:
return interfaceConfig.temporaryChat !== undefined;
case PermissionTypes.RUN_CODE:
return interfaceConfig.runCode !== undefined;
case PermissionTypes.WEB_SEARCH:
return interfaceConfig.webSearch !== undefined;
case PermissionTypes.PEOPLE_PICKER:
return interfaceConfig.peoplePicker !== undefined;
case PermissionTypes.MARKETPLACE:
return interfaceConfig.marketplace !== undefined;
case PermissionTypes.FILE_SEARCH:
return interfaceConfig.fileSearch !== undefined;
case PermissionTypes.FILE_CITATIONS:
return interfaceConfig.fileCitations !== undefined;
default:
return false;
}
}

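`hasExplicitConfig` deliberately distinguishes an explicit value (including `false`) from an omitted key, which drives whether existing role permissions get overwritten. A worked illustration (values are illustrative):

// Explicit `false` counts as configured; omission does not.
hasExplicitConfig({ prompts: false }, PermissionTypes.PROMPTS); // true  -> apply the explicit config
hasExplicitConfig({}, PermissionTypes.PROMPTS);                 // false -> preserve existing permissions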
/**
* Loads the default interface object.
* @param {TCustomConfig | undefined} config - The loaded custom configuration.
* @param {TConfigDefaults} configDefaults - The custom configuration default values.
* @param {SystemRoles} [roleName] - The role to load the default interface for, defaults to `'USER'`.
* @returns {Promise<TCustomConfig['interface']>} The default interface object.
*/
async function loadDefaultInterface(config, configDefaults, roleName = SystemRoles.USER) {
async function loadDefaultInterface(config, configDefaults) {
const { interface: interfaceConfig } = config ?? {};
const { interface: defaults } = configDefaults;
const hasModelSpecs = config?.modelSpecs?.list?.length > 0;
@@ -31,6 +65,7 @@ async function loadDefaultInterface(config, configDefaults, roleName = SystemRol

/** @type {TCustomConfig['interface']} */
const loadedInterface = removeNullishValues({
// UI elements - use schema defaults
endpointsMenu:
interfaceConfig?.endpointsMenu ?? (hasModelSpecs ? false : defaults.endpointsMenu),
modelSelect:
@@ -42,81 +77,168 @@ async function loadDefaultInterface(config, configDefaults, roleName = SystemRol
privacyPolicy: interfaceConfig?.privacyPolicy ?? defaults.privacyPolicy,
termsOfService: interfaceConfig?.termsOfService ?? defaults.termsOfService,
mcpServers: interfaceConfig?.mcpServers ?? defaults.mcpServers,
bookmarks: interfaceConfig?.bookmarks ?? defaults.bookmarks,
memories: shouldDisableMemories ? false : (interfaceConfig?.memories ?? defaults.memories),
prompts: interfaceConfig?.prompts ?? defaults.prompts,
multiConvo: interfaceConfig?.multiConvo ?? defaults.multiConvo,
agents: interfaceConfig?.agents ?? defaults.agents,
temporaryChat: interfaceConfig?.temporaryChat ?? defaults.temporaryChat,
runCode: interfaceConfig?.runCode ?? defaults.runCode,
webSearch: interfaceConfig?.webSearch ?? defaults.webSearch,
fileSearch: interfaceConfig?.fileSearch ?? defaults.fileSearch,
fileCitations: interfaceConfig?.fileCitations ?? defaults.fileCitations,
customWelcome: interfaceConfig?.customWelcome ?? defaults.customWelcome,
peoplePicker: {
admin: {
users: interfaceConfig?.peoplePicker?.admin?.users ?? defaults.peoplePicker?.admin.users,
groups: interfaceConfig?.peoplePicker?.admin?.groups ?? defaults.peoplePicker?.admin.groups,
},
user: {
users: interfaceConfig?.peoplePicker?.user?.users ?? defaults.peoplePicker?.user.users,
groups: interfaceConfig?.peoplePicker?.user?.groups ?? defaults.peoplePicker?.user.groups,
},
},
marketplace: {
admin: {
use: interfaceConfig?.marketplace?.admin?.use ?? defaults.marketplace?.admin.use,
},
user: {
use: interfaceConfig?.marketplace?.user?.use ?? defaults.marketplace?.user.use,
},
},

// Permissions - only include if explicitly configured
bookmarks: interfaceConfig?.bookmarks,
memories: shouldDisableMemories ? false : interfaceConfig?.memories,
prompts: interfaceConfig?.prompts,
multiConvo: interfaceConfig?.multiConvo,
agents: interfaceConfig?.agents,
temporaryChat: interfaceConfig?.temporaryChat,
runCode: interfaceConfig?.runCode,
webSearch: interfaceConfig?.webSearch,
fileSearch: interfaceConfig?.fileSearch,
fileCitations: interfaceConfig?.fileCitations,
peoplePicker: interfaceConfig?.peoplePicker,
marketplace: interfaceConfig?.marketplace,
});

await updateAccessPermissions(roleName, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: loadedInterface.prompts },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: loadedInterface.bookmarks },
[PermissionTypes.MEMORIES]: {
[Permissions.USE]: loadedInterface.memories,
[Permissions.OPT_OUT]: isPersonalizationEnabled,
},
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: loadedInterface.multiConvo },
[PermissionTypes.AGENTS]: { [Permissions.USE]: loadedInterface.agents },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: loadedInterface.temporaryChat },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: loadedInterface.runCode },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: loadedInterface.webSearch },
[PermissionTypes.PEOPLE_PICKER]: {
[Permissions.VIEW_USERS]: loadedInterface.peoplePicker.user?.users,
[Permissions.VIEW_GROUPS]: loadedInterface.peoplePicker.user?.groups,
},
[PermissionTypes.MARKETPLACE]: {
[Permissions.USE]: loadedInterface.marketplace.user?.use,
},
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: loadedInterface.fileSearch },
[PermissionTypes.FILE_CITATIONS]: { [Permissions.USE]: loadedInterface.fileCitations },
});
await updateAccessPermissions(SystemRoles.ADMIN, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: loadedInterface.prompts },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: loadedInterface.bookmarks },
[PermissionTypes.MEMORIES]: {
[Permissions.USE]: loadedInterface.memories,
[Permissions.OPT_OUT]: isPersonalizationEnabled,
},
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: loadedInterface.multiConvo },
[PermissionTypes.AGENTS]: { [Permissions.USE]: loadedInterface.agents },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: loadedInterface.temporaryChat },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: loadedInterface.runCode },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: loadedInterface.webSearch },
[PermissionTypes.PEOPLE_PICKER]: {
[Permissions.VIEW_USERS]: loadedInterface.peoplePicker.admin?.users,
[Permissions.VIEW_GROUPS]: loadedInterface.peoplePicker.admin?.groups,
},
[PermissionTypes.MARKETPLACE]: {
[Permissions.USE]: loadedInterface.marketplace.admin?.use,
},
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: loadedInterface.fileSearch },
[PermissionTypes.FILE_CITATIONS]: { [Permissions.USE]: loadedInterface.fileCitations },
});
// Helper to get permission value with proper precedence
const getPermissionValue = (configValue, roleDefault, schemaDefault) => {
if (configValue !== undefined) return configValue;
if (roleDefault !== undefined) return roleDefault;
return schemaDefault;
};

// Permission precedence order:
// 1. Explicit user configuration (from librechat.yaml)
// 2. Role-specific defaults (from roleDefaults)
// 3. Interface schema defaults (from interfaceSchema.default())
for (const roleName of [SystemRoles.USER, SystemRoles.ADMIN]) {
const defaultPerms = roleDefaults[roleName].permissions;
const existingRole = await getRoleByName(roleName);
const existingPermissions = existingRole?.permissions || {};
const permissionsToUpdate = {};

// Helper to add permission if it should be updated
const addPermissionIfNeeded = (permType, permissions) => {
const permTypeExists = existingPermissions[permType];
const isExplicitlyConfigured =
interfaceConfig && hasExplicitConfig(interfaceConfig, permType);

// Only update if: doesn't exist OR explicitly configured
if (!permTypeExists || isExplicitlyConfigured) {
permissionsToUpdate[permType] = permissions;
if (!permTypeExists) {
logger.debug(`Role '${roleName}': Setting up default permissions for '${permType}'`);
} else if (isExplicitlyConfigured) {
logger.debug(`Role '${roleName}': Applying explicit config for '${permType}'`);
}
} else {
logger.debug(`Role '${roleName}': Preserving existing permissions for '${permType}'`);
}
};

// Build permissions for each type
const allPermissions = {
[PermissionTypes.PROMPTS]: {
[Permissions.USE]: getPermissionValue(
loadedInterface.prompts,
defaultPerms[PermissionTypes.PROMPTS]?.[Permissions.USE],
defaults.prompts,
),
},
[PermissionTypes.BOOKMARKS]: {
[Permissions.USE]: getPermissionValue(
loadedInterface.bookmarks,
defaultPerms[PermissionTypes.BOOKMARKS]?.[Permissions.USE],
defaults.bookmarks,
),
},
[PermissionTypes.MEMORIES]: {
[Permissions.USE]: getPermissionValue(
loadedInterface.memories,
defaultPerms[PermissionTypes.MEMORIES]?.[Permissions.USE],
defaults.memories,
),
[Permissions.OPT_OUT]: isPersonalizationEnabled,
},
[PermissionTypes.MULTI_CONVO]: {
[Permissions.USE]: getPermissionValue(
loadedInterface.multiConvo,
defaultPerms[PermissionTypes.MULTI_CONVO]?.[Permissions.USE],
defaults.multiConvo,
),
},
[PermissionTypes.AGENTS]: {
[Permissions.USE]: getPermissionValue(
loadedInterface.agents,
defaultPerms[PermissionTypes.AGENTS]?.[Permissions.USE],
defaults.agents,
),
},
[PermissionTypes.TEMPORARY_CHAT]: {
[Permissions.USE]: getPermissionValue(
loadedInterface.temporaryChat,
defaultPerms[PermissionTypes.TEMPORARY_CHAT]?.[Permissions.USE],
defaults.temporaryChat,
),
},
[PermissionTypes.RUN_CODE]: {
[Permissions.USE]: getPermissionValue(
loadedInterface.runCode,
defaultPerms[PermissionTypes.RUN_CODE]?.[Permissions.USE],
defaults.runCode,
),
},
[PermissionTypes.WEB_SEARCH]: {
[Permissions.USE]: getPermissionValue(
loadedInterface.webSearch,
defaultPerms[PermissionTypes.WEB_SEARCH]?.[Permissions.USE],
defaults.webSearch,
),
},
[PermissionTypes.PEOPLE_PICKER]: {
[Permissions.VIEW_USERS]: getPermissionValue(
loadedInterface.peoplePicker?.users,
defaultPerms[PermissionTypes.PEOPLE_PICKER]?.[Permissions.VIEW_USERS],
defaults.peoplePicker?.users,
),
[Permissions.VIEW_GROUPS]: getPermissionValue(
loadedInterface.peoplePicker?.groups,
defaultPerms[PermissionTypes.PEOPLE_PICKER]?.[Permissions.VIEW_GROUPS],
defaults.peoplePicker?.groups,
),
[Permissions.VIEW_ROLES]: getPermissionValue(
loadedInterface.peoplePicker?.roles,
defaultPerms[PermissionTypes.PEOPLE_PICKER]?.[Permissions.VIEW_ROLES],
defaults.peoplePicker?.roles,
),
},
[PermissionTypes.MARKETPLACE]: {
[Permissions.USE]: getPermissionValue(
loadedInterface.marketplace?.use,
defaultPerms[PermissionTypes.MARKETPLACE]?.[Permissions.USE],
defaults.marketplace?.use,
),
},
[PermissionTypes.FILE_SEARCH]: {
[Permissions.USE]: getPermissionValue(
loadedInterface.fileSearch,
defaultPerms[PermissionTypes.FILE_SEARCH]?.[Permissions.USE],
defaults.fileSearch,
),
},
[PermissionTypes.FILE_CITATIONS]: {
[Permissions.USE]: getPermissionValue(
loadedInterface.fileCitations,
defaultPerms[PermissionTypes.FILE_CITATIONS]?.[Permissions.USE],
defaults.fileCitations,
),
},
};

// Check and add each permission type if needed
for (const [permType, permissions] of Object.entries(allPermissions)) {
addPermissionIfNeeded(permType, permissions);
}

// Update permissions if any need updating
if (Object.keys(permissionsToUpdate).length > 0) {
await updateAccessPermissions(roleName, permissionsToUpdate, existingRole);
}
}

let i = 0;
const logSettings = () => {

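The `getPermissionValue` helper above resolves each permission from three tiers: explicit config, then role default, then schema default. A worked illustration (values are illustrative):

// Explicit config wins, then the role default, then the schema default.
getPermissionValue(true, false, true);          // -> true  (explicit config wins)
getPermissionValue(undefined, false, true);     // -> false (role default applies)
getPermissionValue(undefined, undefined, true); // -> true  (schema default)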
File diff suppressed because it is too large
48
api/server/services/start/migration.js
Normal file
@@ -0,0 +1,48 @@
const mongoose = require('mongoose');
const { logger } = require('@librechat/data-schemas');
const {
logAgentMigrationWarning,
logPromptMigrationWarning,
checkAgentPermissionsMigration,
checkPromptPermissionsMigration,
} = require('@librechat/api');
const { getProjectByName } = require('~/models/Project');
const { Agent, PromptGroup } = require('~/db/models');
const { findRoleByIdentifier } = require('~/models');

/**
* Check if permissions migrations are needed for shared resources
* This runs at the end to ensure all systems are initialized
*/
async function checkMigrations() {
try {
const agentMigrationResult = await checkAgentPermissionsMigration({
mongoose,
methods: {
findRoleByIdentifier,
getProjectByName,
},
AgentModel: Agent,
});
logAgentMigrationWarning(agentMigrationResult);
} catch (error) {
logger.error('Failed to check agent permissions migration:', error);
}
try {
const promptMigrationResult = await checkPromptPermissionsMigration({
mongoose,
methods: {
findRoleByIdentifier,
getProjectByName,
},
PromptGroupModel: PromptGroup,
});
logPromptMigrationWarning(promptMigrationResult);
} catch (error) {
logger.error('Failed to check prompt permissions migration:', error);
}
}

module.exports = {
checkMigrations,
};
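`checkMigrations` is meant to run after everything else is initialized, and each check is independently guarded so one failure does not block the other. A sketch of the expected call site; the exact placement in server startup is an assumption based on the comment above:

// Hypothetical startup tail; checkMigrations comes from the new module above.
const { checkMigrations } = require('~/server/services/start/migration');
await checkMigrations(); // logs warnings only; never throws, since both checks catch internally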
@@ -1,19 +1,48 @@
const passport = require('passport');
const session = require('express-session');
const { isEnabled } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { CacheKeys } = require('librechat-data-provider');
const {
openIdJwtLogin,
facebookLogin,
discordLogin,
setupOpenId,
googleLogin,
githubLogin,
discordLogin,
facebookLogin,
appleLogin,
setupSaml,
openIdJwtLogin,
} = require('~/strategies');
const { isEnabled } = require('~/server/utils');
const { logger } = require('~/config');
const { getLogStores } = require('~/cache');
const { CacheKeys } = require('librechat-data-provider');

/**
* Configures OpenID Connect for the application.
* @param {Express.Application} app - The Express application instance.
* @returns {Promise<void>}
*/
async function configureOpenId(app) {
logger.info('Configuring OpenID Connect...');
const sessionOptions = {
secret: process.env.OPENID_SESSION_SECRET,
resave: false,
saveUninitialized: false,
store: getLogStores(CacheKeys.OPENID_SESSION),
};
app.use(session(sessionOptions));
app.use(passport.session());

const config = await setupOpenId();
if (!config) {
logger.error('OpenID Connect configuration failed - strategy not registered.');
return;
}

if (isEnabled(process.env.OPENID_REUSE_TOKENS)) {
logger.info('OpenID token reuse is enabled.');
passport.use('openidJwt', openIdJwtLogin(config));
}
logger.info('OpenID Connect configured successfully.');
}

/**
*
@@ -44,21 +73,7 @@ const configureSocialLogins = async (app) => {
process.env.OPENID_SCOPE &&
process.env.OPENID_SESSION_SECRET
) {
logger.info('Configuring OpenID Connect...');
const sessionOptions = {
secret: process.env.OPENID_SESSION_SECRET,
resave: false,
saveUninitialized: false,
store: getLogStores(CacheKeys.OPENID_SESSION),
};
app.use(session(sessionOptions));
app.use(passport.session());
const config = await setupOpenId();
if (isEnabled(process.env.OPENID_REUSE_TOKENS)) {
logger.info('OpenID token reuse is enabled.');
passport.use('openidJwt', openIdJwtLogin(config));
}
logger.info('OpenID Connect configured.');
await configureOpenId(app);
}
if (
process.env.SAML_ENTRY_POINT &&

@@ -12,7 +12,7 @@ const jwtLogin = () =>
},
async (payload, done) => {
try {
const user = await getUserById(payload?.id, '-password -__v -totpSecret');
const user = await getUserById(payload?.id, '-password -__v -totpSecret -backupCodes');
if (user) {
user.id = user._id.toString();
if (!user.role) {

@@ -1,10 +1,10 @@
const fs = require('fs');
const { isEnabled } = require('@librechat/api');
const LdapStrategy = require('passport-ldapauth');
const { SystemRoles } = require('librechat-data-provider');
const { logger } = require('@librechat/data-schemas');
const { SystemRoles, ErrorTypes } = require('librechat-data-provider');
const { createUser, findUser, updateUser, countUsers } = require('~/models');
const { getBalanceConfig } = require('~/server/services/Config');
const { isEnabled } = require('~/server/utils');

const {
LDAP_URL,
@@ -90,6 +90,14 @@ const ldapLogin = new LdapStrategy(ldapOptions, async (userinfo, done) => {
(LDAP_ID && userinfo[LDAP_ID]) || userinfo.uid || userinfo.sAMAccountName || userinfo.mail;

let user = await findUser({ ldapId });
if (user && user.provider !== 'ldap') {
logger.info(
`[ldapStrategy] User ${user.email} already exists with provider ${user.provider}`,
);
return done(null, false, {
message: ErrorTypes.AUTH_FAILED,
});
}

const fullNameAttributes = LDAP_FULL_NAME && LDAP_FULL_NAME.split(',');
const fullName =

@@ -22,7 +22,7 @@ async function passportLogin(req, email, password, done) {
return done(null, false, { message: validationError });
}

const user = await findUser({ email: email.trim() });
const user = await findUser({ email: email.trim() }, '+password');
if (!user) {
logError('Passport Local Strategy - User Not Found', { email });
logger.error(`[Login] [Login failed] [Username: ${email}] [Request-IP: ${req.ip}]`);

@@ -3,9 +3,9 @@ const fetch = require('node-fetch');
const passport = require('passport');
const client = require('openid-client');
const jwtDecode = require('jsonwebtoken/decode');
const { CacheKeys } = require('librechat-data-provider');
const { HttpsProxyAgent } = require('https-proxy-agent');
const { hashToken, logger } = require('@librechat/data-schemas');
const { CacheKeys, ErrorTypes } = require('librechat-data-provider');
const { Strategy: OpenIDStrategy } = require('openid-client/passport');
const { isEnabled, safeStringify, logHeaders } = require('@librechat/api');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
@@ -99,11 +99,29 @@ class CustomOpenIDStrategy extends OpenIDStrategy {
const hostAndProtocol = process.env.DOMAIN_SERVER;
return new URL(`${hostAndProtocol}${req.originalUrl ?? req.url}`);
}

authorizationRequestParams(req, options) {
const params = super.authorizationRequestParams(req, options);
if (options?.state && !params.has('state')) {
params.set('state', options.state);
}

if (process.env.OPENID_AUDIENCE) {
params.set('audience', process.env.OPENID_AUDIENCE);
logger.debug(
`[openidStrategy] Adding audience to authorization request: ${process.env.OPENID_AUDIENCE}`,
);
}

/** Generate nonce for federated providers that require it */
const shouldGenerateNonce = isEnabled(process.env.OPENID_GENERATE_NONCE);
if (shouldGenerateNonce && !params.has('nonce') && this._sessionKey) {
const crypto = require('crypto');
const nonce = crypto.randomBytes(16).toString('hex');
params.set('nonce', nonce);
logger.debug('[openidStrategy] Generated nonce for federated provider:', nonce);
}

return params;
}
}
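The override above augments the authorization request three ways: it re-asserts `state`, appends `audience` when `OPENID_AUDIENCE` is set, and can generate a per-request `nonce`. A sketch of the configuration that activates both optional behaviors (the audience URL is illustrative):

// Illustrative env setup; the override then adds audience and a 32-hex-char nonce.
process.env.OPENID_AUDIENCE = 'https://api.example.com'; // hypothetical audience value
process.env.OPENID_GENERATE_NONCE = 'true';
// Resulting authorization params include: state=<options.state>,
// audience=https://api.example.com, nonce=<crypto.randomBytes(16) as hex>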
@@ -268,12 +286,20 @@ function convertToUsername(input, defaultValue = '') {
*/
async function setupOpenId() {
try {
const shouldGenerateNonce = isEnabled(process.env.OPENID_GENERATE_NONCE);

/** @type {ClientMetadata} */
const clientMetadata = {
client_id: process.env.OPENID_CLIENT_ID,
client_secret: process.env.OPENID_CLIENT_SECRET,
};

if (shouldGenerateNonce) {
clientMetadata.response_types = ['code'];
clientMetadata.grant_types = ['authorization_code'];
clientMetadata.token_endpoint_auth_method = 'client_secret_post';
}

/** @type {Configuration} */
openidConfig = await client.discovery(
new URL(process.env.OPENID_ISSUER),
@@ -289,11 +315,19 @@ async function setupOpenId() {
const requiredRoleParameterPath = process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH;
const requiredRoleTokenKind = process.env.OPENID_REQUIRED_ROLE_TOKEN_KIND;
const usePKCE = isEnabled(process.env.OPENID_USE_PKCE);
logger.info(`[openidStrategy] OpenID authentication configuration`, {
generateNonce: shouldGenerateNonce,
reason: shouldGenerateNonce
? 'OPENID_GENERATE_NONCE=true - Will generate nonce and use explicit metadata for federated providers'
: 'OPENID_GENERATE_NONCE=false - Standard flow without explicit nonce or metadata',
});

const openidLogin = new CustomOpenIDStrategy(
{
config: openidConfig,
scope: process.env.OPENID_SCOPE,
callbackURL: process.env.DOMAIN_SERVER + process.env.OPENID_CALLBACK_URL,
clockTolerance: process.env.OPENID_CLOCK_TOLERANCE || 300,
usePKCE,
},
async (tokenset, done) => {
@@ -312,6 +346,14 @@ async function setupOpenId() {
} for openidId: ${claims.sub}`,
);
}
if (user != null && user.provider !== 'openid') {
logger.info(
`[openidStrategy] Attempted OpenID login by user ${user.email}, was registered with "${user.provider}" provider`,
);
return done(null, false, {
message: ErrorTypes.AUTH_FAILED,
});
}
const userinfo = {
...claims,
...(await getUserInfo(openidConfig, tokenset.access_token, claims.sub)),

@@ -1,7 +1,8 @@
|
||||
const fetch = require('node-fetch');
|
||||
const jwtDecode = require('jsonwebtoken/decode');
|
||||
const { setupOpenId } = require('./openidStrategy');
|
||||
const { ErrorTypes } = require('librechat-data-provider');
|
||||
const { findUser, createUser, updateUser } = require('~/models');
|
||||
const { setupOpenId } = require('./openidStrategy');
|
||||
|
||||
// --- Mocks ---
|
||||
jest.mock('node-fetch');
|
||||
@@ -50,7 +51,7 @@ jest.mock('openid-client', () => {
|
||||
issuer: 'https://fake-issuer.com',
|
||||
// Add any other properties needed by the implementation
|
||||
}),
|
||||
fetchUserInfo: jest.fn().mockImplementation((config, accessToken, sub) => {
|
||||
fetchUserInfo: jest.fn().mockImplementation(() => {
|
||||
// Only return additional properties, but don't override any claims
|
||||
return Promise.resolve({});
|
||||
}),
|
||||
@@ -261,17 +262,20 @@ describe('setupOpenId', () => {
|
||||
});
|
||||
|
||||
it('should update an existing user on login', async () => {
|
||||
// Arrange – simulate that a user already exists
|
||||
// Arrange – simulate that a user already exists with openid provider
|
||||
const existingUser = {
|
||||
_id: 'existingUserId',
|
||||
provider: 'local',
|
||||
provider: 'openid',
|
||||
email: tokenset.claims().email,
|
||||
openidId: '',
|
||||
username: '',
|
||||
name: '',
|
||||
};
|
||||
findUser.mockImplementation(async (query) => {
|
||||
if (query.openidId === tokenset.claims().sub || query.email === tokenset.claims().email) {
|
||||
if (
|
||||
query.openidId === tokenset.claims().sub ||
|
||||
(query.email === tokenset.claims().email && query.provider === 'openid')
|
||||
) {
|
||||
return existingUser;
|
||||
}
|
||||
return null;
|
||||
@@ -294,12 +298,38 @@ describe('setupOpenId', () => {
     );
   });
 
+  it('should block login when email exists with different provider', async () => {
+    // Arrange – simulate that a user exists with same email but different provider
+    const existingUser = {
+      _id: 'existingUserId',
+      provider: 'google',
+      email: tokenset.claims().email,
+      googleId: 'some-google-id',
+      username: 'existinguser',
+      name: 'Existing User',
+    };
+    findUser.mockImplementation(async (query) => {
+      if (query.email === tokenset.claims().email && !query.provider) {
+        return existingUser;
+      }
+      return null;
+    });
+
+    // Act
+    const result = await validate(tokenset);
+
+    // Assert – verify that the strategy rejects login
+    expect(result.user).toBe(false);
+    expect(result.details.message).toBe(ErrorTypes.AUTH_FAILED);
+    expect(createUser).not.toHaveBeenCalled();
+    expect(updateUser).not.toHaveBeenCalled();
+  });
+
   it('should enforce the required role and reject login if missing', async () => {
     // Arrange – simulate a token without the required role.
     jwtDecode.mockReturnValue({
       roles: ['SomeOtherRole'],
     });
     const userinfo = tokenset.claims();
 
     // Act
     const { user, details } = await validate(tokenset);
@@ -310,9 +340,6 @@ describe('setupOpenId', () => {
   });
 
   it('should attempt to download and save the avatar if picture is provided', async () => {
-    // Arrange – ensure userinfo contains a picture URL
-    const userinfo = tokenset.claims();
-
     // Act
     const { user } = await validate(tokenset);
api/strategies/process.js
@@ -22,9 +22,12 @@ const handleExistingUser = async (oldUser, avatarUrl) => {
   const isLocal = fileStrategy === FileSources.local;
 
   let updatedAvatar = false;
-  if (isLocal && (oldUser.avatar === null || !oldUser.avatar.includes('?manual=true'))) {
+  const hasManualFlag =
+    typeof oldUser?.avatar === 'string' && oldUser.avatar.includes('?manual=true');
+
+  if (isLocal && (!oldUser?.avatar || !hasManualFlag)) {
     updatedAvatar = avatarUrl;
-  } else if (!isLocal && (oldUser.avatar === null || !oldUser.avatar.includes('?manual=true'))) {
+  } else if (!isLocal && (!oldUser?.avatar || !hasManualFlag)) {
     const userId = oldUser._id;
     const resizedBuffer = await resizeAvatar({
       userId,
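The old condition called .includes() directly on oldUser.avatar, which threw a TypeError whenever the avatar was undefined; the new hasManualFlag predicate short-circuits on anything that is not a string. A quick standalone check of how it behaves across the edge cases the new tests below cover:

// Mirrors the hasManualFlag predicate introduced above.
const hasManualFlag = (avatar) =>
  typeof avatar === 'string' && avatar.includes('?manual=true');

console.log(hasManualFlag(null)); // false (old code null-checked this case)
console.log(hasManualFlag(undefined)); // false (old code threw a TypeError here)
console.log(hasManualFlag('')); // false
console.log(hasManualFlag('a.png?manual=true')); // true
console.log(hasManualFlag('a.png?manual=false')); // false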
164 api/strategies/process.test.js Normal file
@@ -0,0 +1,164 @@
const { FileSources } = require('librechat-data-provider');
const { handleExistingUser } = require('./process');

jest.mock('~/server/services/Files/strategies', () => ({
  getStrategyFunctions: jest.fn(),
}));

jest.mock('~/server/services/Files/images/avatar', () => ({
  resizeAvatar: jest.fn(),
}));

jest.mock('~/models', () => ({
  updateUser: jest.fn(),
  createUser: jest.fn(),
  getUserById: jest.fn(),
}));

jest.mock('~/server/services/Config', () => ({
  getBalanceConfig: jest.fn(),
}));

const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { resizeAvatar } = require('~/server/services/Files/images/avatar');
const { updateUser } = require('~/models');

describe('handleExistingUser', () => {
  beforeEach(() => {
    jest.clearAllMocks();
    process.env.CDN_PROVIDER = FileSources.local;
  });

  it('should handle null avatar without throwing error', async () => {
    const oldUser = {
      _id: 'user123',
      avatar: null,
    };
    const avatarUrl = 'https://example.com/avatar.png';

    await handleExistingUser(oldUser, avatarUrl);

    expect(updateUser).toHaveBeenCalledWith('user123', { avatar: avatarUrl });
  });

  it('should handle undefined avatar without throwing error', async () => {
    const oldUser = {
      _id: 'user123',
      // avatar is undefined
    };
    const avatarUrl = 'https://example.com/avatar.png';

    await handleExistingUser(oldUser, avatarUrl);

    expect(updateUser).toHaveBeenCalledWith('user123', { avatar: avatarUrl });
  });

  it('should not update avatar if it has manual=true flag', async () => {
    const oldUser = {
      _id: 'user123',
      avatar: 'https://example.com/avatar.png?manual=true',
    };
    const avatarUrl = 'https://example.com/new-avatar.png';

    await handleExistingUser(oldUser, avatarUrl);

    expect(updateUser).not.toHaveBeenCalled();
  });

  it('should update avatar for local storage when avatar has no manual flag', async () => {
    const oldUser = {
      _id: 'user123',
      avatar: 'https://example.com/old-avatar.png',
    };
    const avatarUrl = 'https://example.com/new-avatar.png';

    await handleExistingUser(oldUser, avatarUrl);

    expect(updateUser).toHaveBeenCalledWith('user123', { avatar: avatarUrl });
  });

  it('should process avatar for non-local storage', async () => {
    process.env.CDN_PROVIDER = 's3';

    const mockProcessAvatar = jest.fn().mockResolvedValue('processed-avatar-url');
    getStrategyFunctions.mockReturnValue({ processAvatar: mockProcessAvatar });
    resizeAvatar.mockResolvedValue(Buffer.from('resized-image'));

    const oldUser = {
      _id: 'user123',
      avatar: null,
    };
    const avatarUrl = 'https://example.com/avatar.png';

    await handleExistingUser(oldUser, avatarUrl);

    expect(resizeAvatar).toHaveBeenCalledWith({
      userId: 'user123',
      input: avatarUrl,
    });
    expect(mockProcessAvatar).toHaveBeenCalledWith({
      buffer: Buffer.from('resized-image'),
      userId: 'user123',
      manual: 'false',
    });
    expect(updateUser).toHaveBeenCalledWith('user123', { avatar: 'processed-avatar-url' });
  });

  it('should not update if avatar already has manual flag in non-local storage', async () => {
    process.env.CDN_PROVIDER = 's3';

    const oldUser = {
      _id: 'user123',
      avatar: 'https://cdn.example.com/avatar.png?manual=true',
    };
    const avatarUrl = 'https://example.com/new-avatar.png';

    await handleExistingUser(oldUser, avatarUrl);

    expect(resizeAvatar).not.toHaveBeenCalled();
    expect(updateUser).not.toHaveBeenCalled();
  });

  it('should handle avatar with query parameters but without manual flag', async () => {
    const oldUser = {
      _id: 'user123',
      avatar: 'https://example.com/avatar.png?size=large&format=webp',
    };
    const avatarUrl = 'https://example.com/new-avatar.png';

    await handleExistingUser(oldUser, avatarUrl);

    expect(updateUser).toHaveBeenCalledWith('user123', { avatar: avatarUrl });
  });

  it('should handle empty string avatar', async () => {
    const oldUser = {
      _id: 'user123',
      avatar: '',
    };
    const avatarUrl = 'https://example.com/avatar.png';

    await handleExistingUser(oldUser, avatarUrl);

    expect(updateUser).toHaveBeenCalledWith('user123', { avatar: avatarUrl });
  });

  it('should handle avatar with manual=false parameter', async () => {
    const oldUser = {
      _id: 'user123',
      avatar: 'https://example.com/avatar.png?manual=false',
    };
    const avatarUrl = 'https://example.com/new-avatar.png';

    await handleExistingUser(oldUser, avatarUrl);

    expect(updateUser).toHaveBeenCalledWith('user123', { avatar: avatarUrl });
  });

  it('should handle oldUser being null gracefully', async () => {
    const avatarUrl = 'https://example.com/avatar.png';

    // This should throw an error when trying to access oldUser._id
    await expect(handleExistingUser(null, avatarUrl)).rejects.toThrow();
  });
});
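Taken together with the process.js hunk above, these tests pin down the behavior of handleExistingUser fairly tightly. A hedged reconstruction of its control flow, simplified and with dependencies injected rather than imported; this is a model implied by the tests, not the actual implementation:

// Simplified model of handleExistingUser, as implied by the tests above.
async function handleExistingUserSketch(oldUser, avatarUrl, deps) {
  const { isLocal, resizeAvatar, processAvatar, updateUser } = deps;
  const hasManualFlag =
    typeof oldUser?.avatar === 'string' && oldUser.avatar.includes('?manual=true');
  if (hasManualFlag) {
    return; // manually set avatars are never overwritten
  }
  if (isLocal) {
    // Local storage: persist the provider's URL as-is.
    await updateUser(oldUser._id, { avatar: avatarUrl });
  } else {
    // CDN storage: resize, upload, then persist the processed URL.
    const buffer = await resizeAvatar({ userId: oldUser._id, input: avatarUrl });
    const avatar = await processAvatar({ buffer, userId: oldUser._id, manual: 'false' });
    await updateUser(oldUser._id, { avatar });
  }
}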
Some files were not shown because too many files have changed in this diff.