Compare commits


1 Commit

Author: Danny Avila
SHA1: c606dd831e
Message: feat: Add latest Claude models to Default List
Date: 2025-08-05 18:12:18 -04:00
408 changed files with 3582 additions and 37160 deletions

View File

@@ -15,20 +15,6 @@ HOST=localhost
PORT=3080
MONGO_URI=mongodb://127.0.0.1:27017/LibreChat
#The maximum number of connections in the connection pool. */
MONGO_MAX_POOL_SIZE=
#The minimum number of connections in the connection pool. */
MONGO_MIN_POOL_SIZE=
#The maximum number of connections that may be in the process of being established concurrently by the connection pool. */
MONGO_MAX_CONNECTING=
#The maximum number of milliseconds that a connection can remain idle in the pool before being removed and closed. */
MONGO_MAX_IDLE_TIME_MS=
#The maximum time in milliseconds that a thread can wait for a connection to become available. */
MONGO_WAIT_QUEUE_TIMEOUT_MS=
# Set to false to disable automatic index creation for all models associated with this connection. */
MONGO_AUTO_INDEX=
# Set to `false` to disable Mongoose automatically calling `createCollection()` on every model created on this connection. */
MONGO_AUTO_CREATE=
DOMAIN_CLIENT=http://localhost:3080
DOMAIN_SERVER=http://localhost:3080
@@ -479,21 +465,6 @@ OPENID_ON_BEHALF_FLOW_USERINFO_SCOPE="user.read" # example for Scope Needed for
# Set to true to use the OpenID Connect end session endpoint for logout
OPENID_USE_END_SESSION_ENDPOINT=
#========================#
# SharePoint Integration #
#========================#
# Requires Entra ID (OpenID) authentication to be configured
# Enable SharePoint file picker in chat and agent panels
# ENABLE_SHAREPOINT_FILEPICKER=true
# SharePoint tenant base URL (e.g., https://yourtenant.sharepoint.com)
# SHAREPOINT_BASE_URL=https://yourtenant.sharepoint.com
# Microsoft Graph API And SharePoint scopes for file picker
# SHAREPOINT_PICKER_SHAREPOINT_SCOPE=https://yourtenant.sharepoint.com/AllSites.Read
# SHAREPOINT_PICKER_GRAPH_SCOPE=Files.Read.All
#========================#
# SAML
# Note: If OpenID is enabled, SAML authentication will be automatically disabled.
@@ -521,21 +492,6 @@ SAML_IMAGE_URL=
# SAML_USE_AUTHN_RESPONSE_SIGNED=
#===============================================#
# Microsoft Graph API / Entra ID Integration #
#===============================================#
# Enable Entra ID people search integration in permissions/sharing system
# When enabled, the people picker will search both local database and Entra ID
USE_ENTRA_ID_FOR_PEOPLE_SEARCH=false
# When enabled, entra id groups owners will be considered as members of the group
ENTRA_ID_INCLUDE_OWNERS_AS_MEMBERS=false
# Microsoft Graph API scopes needed for people/group search
# Default scopes provide access to user profiles and group memberships
OPENID_GRAPH_SCOPES=User.Read,People.Read,GroupMember.Read.All
# LDAP
LDAP_URL=
LDAP_BIND_DN=

View File

@@ -4,13 +4,12 @@ name: Build Helm Charts on Tag
on:
push:
tags:
- "chart-*"
- "*"
jobs:
release:
permissions:
contents: write
packages: write
runs-on: ubuntu-latest
steps:
- name: Checkout
@@ -27,49 +26,15 @@ jobs:
uses: azure/setup-helm@v4
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
- name: Build Subchart Deps
run: |
cd helm/librechat
helm dependency build
cd ../librechat-rag-api
helm dependency build
cd helm/librechat-rag-api
helm dependency build
- name: Get Chart Version
id: chart-version
run: |
CHART_VERSION=$(echo "${{ github.ref_name }}" | cut -d'-' -f2)
echo "CHART_VERSION=${CHART_VERSION}" >> "$GITHUB_OUTPUT"
# Log in to GitHub Container Registry
- name: Log in to GitHub Container Registry
uses: docker/login-action@v3
- name: Run chart-releaser
uses: helm/chart-releaser-action@v1.6.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
# Run Helm OCI Charts Releaser
# This is for the librechat chart
- name: Release Helm OCI Charts for librechat
uses: appany/helm-oci-chart-releaser@v0.4.2
with:
name: librechat
repository: ${{ github.actor }}/librechat-chart
tag: ${{ steps.chart-version.outputs.CHART_VERSION }}
path: helm/librechat
registry: ghcr.io
registry_username: ${{ github.actor }}
registry_password: ${{ secrets.GITHUB_TOKEN }}
# this is for the librechat-rag-api chart
- name: Release Helm OCI Charts for librechat-rag-api
uses: appany/helm-oci-chart-releaser@v0.4.2
with:
name: librechat-rag-api
repository: ${{ github.actor }}/librechat-chart
tag: ${{ steps.chart-version.outputs.CHART_VERSION }}
path: helm/librechat-rag-api
registry: ghcr.io
registry_username: ${{ github.actor }}
registry_password: ${{ secrets.GITHUB_TOKEN }}
charts_dir: helm
skip_existing: true
env:
CR_TOKEN: "${{ secrets.GITHUB_TOKEN }}"

.gitignore
View File

@@ -13,9 +13,6 @@ pids
*.seed
.git
# CI/CD data
test-image*
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

.vscode/launch.json
View File

@@ -8,8 +8,7 @@
"skipFiles": ["<node_internals>/**"],
"program": "${workspaceFolder}/api/server/index.js",
"env": {
"NODE_ENV": "production",
"NODE_TLS_REJECT_UNAUTHORIZED": "0"
"NODE_ENV": "production"
},
"console": "integratedTerminal",
"envFile": "${workspaceFolder}/.env"

View File

@@ -1,4 +1,4 @@
# v0.8.0-rc2
# v0.8.0-rc1
# Base node image
FROM node:20-alpine AS node

View File

@@ -1,5 +1,5 @@
# Dockerfile.multi
# v0.8.0-rc2
# v0.8.0-rc1
# Base for all builds
FROM node:20-alpine AS base-min

View File

@@ -1222,9 +1222,7 @@ ${convo}
}
if (this.isOmni === true && modelOptions.max_tokens != null) {
const paramName =
modelOptions.useResponsesApi === true ? 'max_output_tokens' : 'max_completion_tokens';
modelOptions[paramName] = modelOptions.max_tokens;
modelOptions.max_completion_tokens = modelOptions.max_tokens;
delete modelOptions.max_tokens;
}
if (this.isOmni === true && modelOptions.temperature != null) {

View File

@@ -3,7 +3,6 @@ const axios = require('axios');
const { tool } = require('@langchain/core/tools');
const { logger } = require('@librechat/data-schemas');
const { Tools, EToolResources } = require('librechat-data-provider');
const { filterFilesByAgentAccess } = require('~/server/services/Files/permissions');
const { generateShortLivedToken } = require('~/server/services/AuthService');
const { getFiles } = require('~/models/File');
@@ -23,24 +22,14 @@ const primeFiles = async (options) => {
const file_ids = tool_resources?.[EToolResources.file_search]?.file_ids ?? [];
const agentResourceIds = new Set(file_ids);
const resourceFiles = tool_resources?.[EToolResources.file_search]?.files ?? [];
// Get all files first
const allFiles = (await getFiles({ file_id: { $in: file_ids } }, null, { text: 0 })) ?? [];
// Filter by access if user and agent are provided
let dbFiles;
if (req?.user?.id && agentId) {
dbFiles = await filterFilesByAgentAccess({
files: allFiles,
userId: req.user.id,
role: req.user.role,
agentId,
});
} else {
dbFiles = allFiles;
}
dbFiles = dbFiles.concat(resourceFiles);
const dbFiles = (
(await getFiles(
{ file_id: { $in: file_ids } },
null,
{ text: 0 },
{ userId: req?.user?.id, agentId },
)) ?? []
).concat(resourceFiles);
let toolContext = `- Note: Semantic search is available through the ${Tools.file_search} tool but no files are currently loaded. Request the user to upload documents to search through.`;
@@ -125,13 +114,11 @@ const createFileSearchTool = async ({ req, files, entity_id }) => {
}
const formattedResults = validResults
.flatMap((result, fileIndex) =>
.flatMap((result) =>
result.data.map(([docInfo, distance]) => ({
filename: docInfo.metadata.source.split('/').pop(),
content: docInfo.page_content,
distance,
file_id: files[fileIndex]?.file_id,
page: docInfo.metadata.page || null,
})),
)
// TODO: results should be sorted by relevance, not distance
@@ -141,37 +128,18 @@ const createFileSearchTool = async ({ req, files, entity_id }) => {
const formattedString = formattedResults
.map(
(result, index) =>
`File: ${result.filename}\nAnchor: \\ue202turn0file${index} (${result.filename})\nRelevance: ${(1.0 - result.distance).toFixed(4)}\nContent: ${
(result) =>
`File: ${result.filename}\nRelevance: ${1.0 - result.distance.toFixed(4)}\nContent: ${
result.content
}\n`,
)
.join('\n---\n');
const sources = formattedResults.map((result) => ({
type: 'file',
fileId: result.file_id,
content: result.content,
fileName: result.filename,
relevance: 1.0 - result.distance,
pages: result.page ? [result.page] : [],
pageRelevance: result.page ? { [result.page]: 1.0 - result.distance } : {},
}));
return [formattedString, { [Tools.file_search]: { sources } }];
return formattedString;
},
{
name: Tools.file_search,
responseFormat: 'content_and_artifact',
description: `Performs semantic search across attached "${Tools.file_search}" documents using natural language queries. This tool analyzes the content of uploaded files to find relevant information, quotes, and passages that best match your query. Use this to extract specific information or find relevant sections within the available documents.
**CITE FILE SEARCH RESULTS:**
Use anchor markers immediately after statements derived from file content. Reference the filename in your text:
- File citation: "The document.pdf states that... \\ue202turn0file0"
- Page reference: "According to report.docx... \\ue202turn0file1"
- Multi-file: "Multiple sources confirm... \\ue200\\ue202turn0file0\\ue202turn0file1\\ue201"
**ALWAYS mention the filename in your text before the citation marker. NEVER use markdown links or footnotes.**`,
description: `Performs semantic search across attached "${Tools.file_search}" documents using natural language queries. This tool analyzes the content of uploaded files to find relevant information, quotes, and passages that best match your query. Use this to extract specific information or find relevant sections within the available documents.`,
schema: z.object({
query: z
.string()

View File

@@ -1,34 +1,11 @@
require('dotenv').config();
const { isEnabled } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const mongoose = require('mongoose');
const MONGO_URI = process.env.MONGO_URI;
if (!MONGO_URI) {
throw new Error('Please define the MONGO_URI environment variable');
}
/** The maximum number of connections in the connection pool. */
const maxPoolSize = parseInt(process.env.MONGO_MAX_POOL_SIZE) || undefined;
/** The minimum number of connections in the connection pool. */
const minPoolSize = parseInt(process.env.MONGO_MIN_POOL_SIZE) || undefined;
/** The maximum number of connections that may be in the process of being established concurrently by the connection pool. */
const maxConnecting = parseInt(process.env.MONGO_MAX_CONNECTING) || undefined;
/** The maximum number of milliseconds that a connection can remain idle in the pool before being removed and closed. */
const maxIdleTimeMS = parseInt(process.env.MONGO_MAX_IDLE_TIME_MS) || undefined;
/** The maximum time in milliseconds that a thread can wait for a connection to become available. */
const waitQueueTimeoutMS = parseInt(process.env.MONGO_WAIT_QUEUE_TIMEOUT_MS) || undefined;
/** Set to false to disable automatic index creation for all models associated with this connection. */
const autoIndex =
process.env.MONGO_AUTO_INDEX != undefined
? isEnabled(process.env.MONGO_AUTO_INDEX) || false
: undefined;
/** Set to `false` to disable Mongoose automatically calling `createCollection()` on every model created on this connection. */
const autoCreate =
process.env.MONGO_AUTO_CREATE != undefined
? isEnabled(process.env.MONGO_AUTO_CREATE) || false
: undefined;
/**
* Global is used here to maintain a cached connection across hot reloads
* in development. This prevents connections growing exponentially
@@ -49,21 +26,13 @@ async function connectDb() {
if (!cached.promise || disconnected) {
const opts = {
bufferCommands: false,
...(maxPoolSize ? { maxPoolSize } : {}),
...(minPoolSize ? { minPoolSize } : {}),
...(maxConnecting ? { maxConnecting } : {}),
...(maxIdleTimeMS ? { maxIdleTimeMS } : {}),
...(waitQueueTimeoutMS ? { waitQueueTimeoutMS } : {}),
...(autoIndex != undefined ? { autoIndex } : {}),
...(autoCreate != undefined ? { autoCreate } : {}),
// useNewUrlParser: true,
// useUnifiedTopology: true,
// bufferMaxEntries: 0,
// useFindAndModify: true,
// useCreateIndex: true
};
logger.info('Mongo Connection options');
logger.info(JSON.stringify(opts, null, 2));
mongoose.set('strictQuery', true);
cached.promise = mongoose.connect(MONGO_URI, opts).then((mongoose) => {
return mongoose;

View File

@@ -3,7 +3,6 @@ module.exports = {
clearMocks: true,
roots: ['<rootDir>'],
coverageDirectory: 'coverage',
testTimeout: 30000, // 30 seconds timeout for all tests
setupFiles: [
'./test/jestSetup.js',
'./test/__mocks__/logger.js',

View File

@@ -1,17 +1,18 @@
const mongoose = require('mongoose');
const crypto = require('node:crypto');
const { logger } = require('@librechat/data-schemas');
const { ResourceType, SystemRoles, Tools, actionDelimiter } = require('librechat-data-provider');
const { SystemRoles, Tools, actionDelimiter } = require('librechat-data-provider');
const { GLOBAL_PROJECT_NAME, EPHEMERAL_AGENT_ID, mcp_delimiter } =
require('librechat-data-provider').Constants;
const { CONFIG_STORE, STARTUP_CONFIG } = require('librechat-data-provider').CacheKeys;
const {
removeAgentFromAllProjects,
removeAgentIdsFromProject,
addAgentIdsToProject,
getProjectByName,
addAgentIdsToProject,
removeAgentIdsFromProject,
removeAgentFromAllProjects,
} = require('./Project');
const { removeAllPermissions } = require('~/server/services/PermissionService');
const { getCachedTools } = require('~/server/services/Config');
const getLogStores = require('~/cache/getLogStores');
const { getActions } = require('./Action');
const { Agent } = require('~/db/models');
@@ -22,7 +23,7 @@ const { Agent } = require('~/db/models');
* @throws {Error} If the agent creation fails.
*/
const createAgent = async (agentData) => {
const { author: _author, ...versionData } = agentData;
const { author, ...versionData } = agentData;
const timestamp = new Date();
const initialAgentData = {
...agentData,
@@ -33,9 +34,7 @@ const createAgent = async (agentData) => {
updatedAt: timestamp,
},
],
category: agentData.category || 'general',
};
return (await Agent.create(initialAgentData)).toObject();
};
@@ -132,7 +131,29 @@ const loadAgent = async ({ req, agent_id, endpoint, model_parameters }) => {
}
agent.version = agent.versions ? agent.versions.length : 0;
return agent;
if (agent.author.toString() === req.user.id) {
return agent;
}
if (!agent.projectIds) {
return null;
}
const cache = getLogStores(CONFIG_STORE);
/** @type {TStartupConfig} */
const cachedStartupConfig = await cache.get(STARTUP_CONFIG);
let { instanceProjectId } = cachedStartupConfig ?? {};
if (!instanceProjectId) {
instanceProjectId = (await getProjectByName(GLOBAL_PROJECT_NAME, '_id'))._id.toString();
}
for (const projectObjectId of agent.projectIds) {
const projectId = projectObjectId.toString();
if (projectId === instanceProjectId) {
return agent;
}
}
};
/**
@@ -162,7 +183,7 @@ const isDuplicateVersion = (updateData, currentData, versions, actionsHash = nul
'actionsHash', // Exclude actionsHash from direct comparison
];
const { $push: _$push, $pull: _$pull, $addToSet: _$addToSet, ...directUpdates } = updateData;
const { $push, $pull, $addToSet, ...directUpdates } = updateData;
if (Object.keys(directUpdates).length === 0 && !actionsHash) {
return null;
@@ -181,116 +202,54 @@ const isDuplicateVersion = (updateData, currentData, versions, actionsHash = nul
let isMatch = true;
for (const field of importantFields) {
const wouldBeValue = wouldBeVersion[field];
const lastVersionValue = lastVersion[field];
// Skip if both are undefined/null
if (!wouldBeValue && !lastVersionValue) {
if (!wouldBeVersion[field] && !lastVersion[field]) {
continue;
}
// Handle arrays
if (Array.isArray(wouldBeValue) || Array.isArray(lastVersionValue)) {
// Normalize: treat undefined/null as empty array for comparison
let wouldBeArr;
if (Array.isArray(wouldBeValue)) {
wouldBeArr = wouldBeValue;
} else if (wouldBeValue == null) {
wouldBeArr = [];
} else {
wouldBeArr = [wouldBeValue];
}
let lastVersionArr;
if (Array.isArray(lastVersionValue)) {
lastVersionArr = lastVersionValue;
} else if (lastVersionValue == null) {
lastVersionArr = [];
} else {
lastVersionArr = [lastVersionValue];
}
if (wouldBeArr.length !== lastVersionArr.length) {
if (Array.isArray(wouldBeVersion[field]) && Array.isArray(lastVersion[field])) {
if (wouldBeVersion[field].length !== lastVersion[field].length) {
isMatch = false;
break;
}
// Special handling for projectIds (MongoDB ObjectIds)
if (field === 'projectIds') {
const wouldBeIds = wouldBeArr.map((id) => id.toString()).sort();
const versionIds = lastVersionArr.map((id) => id.toString()).sort();
const wouldBeIds = wouldBeVersion[field].map((id) => id.toString()).sort();
const versionIds = lastVersion[field].map((id) => id.toString()).sort();
if (!wouldBeIds.every((id, i) => id === versionIds[i])) {
isMatch = false;
break;
}
}
// Handle arrays of objects
else if (
wouldBeArr.length > 0 &&
typeof wouldBeArr[0] === 'object' &&
wouldBeArr[0] !== null
) {
const sortedWouldBe = [...wouldBeArr].map((item) => JSON.stringify(item)).sort();
const sortedVersion = [...lastVersionArr].map((item) => JSON.stringify(item)).sort();
// Handle arrays of objects like tool_kwargs
else if (typeof wouldBeVersion[field][0] === 'object' && wouldBeVersion[field][0] !== null) {
const sortedWouldBe = [...wouldBeVersion[field]].map((item) => JSON.stringify(item)).sort();
const sortedVersion = [...lastVersion[field]].map((item) => JSON.stringify(item)).sort();
if (!sortedWouldBe.every((item, i) => item === sortedVersion[i])) {
isMatch = false;
break;
}
} else {
const sortedWouldBe = [...wouldBeArr].sort();
const sortedVersion = [...lastVersionArr].sort();
const sortedWouldBe = [...wouldBeVersion[field]].sort();
const sortedVersion = [...lastVersion[field]].sort();
if (!sortedWouldBe.every((item, i) => item === sortedVersion[i])) {
isMatch = false;
break;
}
}
}
// Handle objects
else if (typeof wouldBeValue === 'object' && wouldBeValue !== null) {
const lastVersionObj =
typeof lastVersionValue === 'object' && lastVersionValue !== null ? lastVersionValue : {};
// For empty objects, normalize the comparison
const wouldBeKeys = Object.keys(wouldBeValue);
const lastVersionKeys = Object.keys(lastVersionObj);
// If both are empty objects, they're equal
if (wouldBeKeys.length === 0 && lastVersionKeys.length === 0) {
continue;
}
// Otherwise do a deep comparison
if (JSON.stringify(wouldBeValue) !== JSON.stringify(lastVersionObj)) {
isMatch = false;
break;
}
}
// Handle primitive values
else {
// For primitives, handle the case where one is undefined and the other is a default value
if (wouldBeValue !== lastVersionValue) {
// Special handling for boolean false vs undefined
if (
typeof wouldBeValue === 'boolean' &&
wouldBeValue === false &&
lastVersionValue === undefined
) {
continue;
}
// Special handling for empty string vs undefined
if (
typeof wouldBeValue === 'string' &&
wouldBeValue === '' &&
lastVersionValue === undefined
) {
continue;
}
} else if (field === 'model_parameters') {
const wouldBeParams = wouldBeVersion[field] || {};
const lastVersionParams = lastVersion[field] || {};
if (JSON.stringify(wouldBeParams) !== JSON.stringify(lastVersionParams)) {
isMatch = false;
break;
}
} else if (wouldBeVersion[field] !== lastVersion[field]) {
isMatch = false;
break;
}
}
@@ -319,14 +278,7 @@ const updateAgent = async (searchParameter, updateData, options = {}) => {
const currentAgent = await Agent.findOne(searchParameter);
if (currentAgent) {
const {
__v,
_id,
id: __id,
versions,
author: _author,
...versionData
} = currentAgent.toObject();
const { __v, _id, id, versions, author, ...versionData } = currentAgent.toObject();
const { $push, $pull, $addToSet, ...directUpdates } = updateData;
let actionsHash = null;
@@ -364,10 +316,17 @@ const updateAgent = async (searchParameter, updateData, options = {}) => {
if (shouldCreateVersion) {
const duplicateVersion = isDuplicateVersion(updateData, versionData, versions, actionsHash);
if (duplicateVersion && !forceVersion) {
// No changes detected, return the current agent without creating a new version
const agentObj = currentAgent.toObject();
agentObj.version = versions.length;
return agentObj;
const error = new Error(
'Duplicate version: This would create a version identical to an existing one',
);
error.statusCode = 409;
error.details = {
duplicateVersion,
versionIndex: versions.findIndex(
(v) => JSON.stringify(duplicateVersion) === JSON.stringify(v),
),
};
throw error;
}
}
@@ -506,117 +465,12 @@ const deleteAgent = async (searchParameter) => {
const agent = await Agent.findOneAndDelete(searchParameter);
if (agent) {
await removeAgentFromAllProjects(agent.id);
await removeAllPermissions({
resourceType: ResourceType.AGENT,
resourceId: agent._id,
});
}
return agent;
};
/**
* Get agents by accessible IDs with optional cursor-based pagination.
* @param {Object} params - The parameters for getting accessible agents.
* @param {Array} [params.accessibleIds] - Array of agent ObjectIds the user has ACL access to.
* @param {Object} [params.otherParams] - Additional query parameters (including author filter).
* @param {number} [params.limit] - Number of agents to return (max 100). If not provided, returns all agents.
* @param {string} [params.after] - Cursor for pagination - get agents after this cursor. // base64 encoded JSON string with updatedAt and _id.
* @returns {Promise<Object>} A promise that resolves to an object containing the agents data and pagination info.
*/
const getListAgentsByAccess = async ({
accessibleIds = [],
otherParams = {},
limit = null,
after = null,
}) => {
const isPaginated = limit !== null && limit !== undefined;
const normalizedLimit = isPaginated ? Math.min(Math.max(1, parseInt(limit) || 20), 100) : null;
// Build base query combining ACL accessible agents with other filters
const baseQuery = { ...otherParams, _id: { $in: accessibleIds } };
// Add cursor condition
if (after) {
try {
const cursor = JSON.parse(Buffer.from(after, 'base64').toString('utf8'));
const { updatedAt, _id } = cursor;
const cursorCondition = {
$or: [
{ updatedAt: { $lt: new Date(updatedAt) } },
{ updatedAt: new Date(updatedAt), _id: { $gt: new mongoose.Types.ObjectId(_id) } },
],
};
// Merge cursor condition with base query
if (Object.keys(baseQuery).length > 0) {
baseQuery.$and = [{ ...baseQuery }, cursorCondition];
// Remove the original conditions from baseQuery to avoid duplication
Object.keys(baseQuery).forEach((key) => {
if (key !== '$and') delete baseQuery[key];
});
} else {
Object.assign(baseQuery, cursorCondition);
}
} catch (error) {
logger.warn('Invalid cursor:', error.message);
}
}
let query = Agent.find(baseQuery, {
id: 1,
_id: 1,
name: 1,
avatar: 1,
author: 1,
projectIds: 1,
description: 1,
updatedAt: 1,
category: 1,
support_contact: 1,
is_promoted: 1,
}).sort({ updatedAt: -1, _id: 1 });
// Only apply limit if pagination is requested
if (isPaginated) {
query = query.limit(normalizedLimit + 1);
}
const agents = await query.lean();
const hasMore = isPaginated ? agents.length > normalizedLimit : false;
const data = (isPaginated ? agents.slice(0, normalizedLimit) : agents).map((agent) => {
if (agent.author) {
agent.author = agent.author.toString();
}
return agent;
});
// Generate next cursor only if paginated
let nextCursor = null;
if (isPaginated && hasMore && data.length > 0) {
const lastAgent = agents[normalizedLimit - 1];
nextCursor = Buffer.from(
JSON.stringify({
updatedAt: lastAgent.updatedAt.toISOString(),
_id: lastAgent._id.toString(),
}),
).toString('base64');
}
return {
object: 'list',
data,
first_id: data.length > 0 ? data[0].id : null,
last_id: data.length > 0 ? data[data.length - 1].id : null,
has_more: hasMore,
after: nextCursor,
};
};
/**
* Get all agents.
* @deprecated Use getListAgentsByAccess for ACL-aware agent listing
* @param {Object} searchParameter - The search parameters to find matching agents.
* @param {string} searchParameter.author - The user ID of the agent's author.
* @returns {Promise<Object>} A promise that resolves to an object containing the agents data and pagination info.
@@ -635,15 +489,13 @@ const getListAgents = async (searchParameter) => {
const agents = (
await Agent.find(query, {
id: 1,
_id: 1,
_id: 0,
name: 1,
avatar: 1,
author: 1,
projectIds: 1,
description: 1,
// @deprecated - isCollaborative replaced by ACL permissions
isCollaborative: 1,
category: 1,
}).lean()
).map((agent) => {
if (agent.author?.toString() !== author) {
@@ -809,14 +661,6 @@ const generateActionMetadataHash = async (actionIds, actions) => {
return hashHex;
};
/**
* Counts the number of promoted agents.
* @returns {Promise<number>} - The count of promoted agents
*/
const countPromotedAgents = async () => {
const count = await Agent.countDocuments({ is_promoted: true });
return count;
};
/**
* Load a default agent based on the endpoint
@@ -834,8 +678,6 @@ module.exports = {
revertAgentVersion,
updateAgentProjects,
addAgentResourceFile,
getListAgentsByAccess,
removeAgentResourceFiles,
generateActionMetadataHash,
countPromotedAgents,
};

View File

@@ -14,7 +14,6 @@ const mongoose = require('mongoose');
const { v4: uuidv4 } = require('uuid');
const { agentSchema } = require('@librechat/data-schemas');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { AccessRoleIds, ResourceType, PrincipalType } = require('librechat-data-provider');
const {
getAgent,
loadAgent,
@@ -22,16 +21,13 @@ const {
updateAgent,
deleteAgent,
getListAgents,
getListAgentsByAccess,
revertAgentVersion,
updateAgentProjects,
addAgentResourceFile,
removeAgentResourceFiles,
generateActionMetadataHash,
revertAgentVersion,
} = require('./Agent');
const permissionService = require('~/server/services/PermissionService');
const { getCachedTools } = require('~/server/services/Config');
const { AclEntry } = require('~/db/models');
/**
* @type {import('mongoose').Model<import('@librechat/data-schemas').IAgent>}
@@ -411,26 +407,12 @@ describe('models/Agent', () => {
describe('Agent CRUD Operations', () => {
let mongoServer;
let AccessRole;
beforeAll(async () => {
mongoServer = await MongoMemoryServer.create();
const mongoUri = mongoServer.getUri();
Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
await mongoose.connect(mongoUri);
// Initialize models
const dbModels = require('~/db/models');
AccessRole = dbModels.AccessRole;
// Create necessary access roles for agents
await AccessRole.create({
accessRoleId: AccessRoleIds.AGENT_OWNER,
name: 'Owner',
description: 'Full control over agents',
resourceType: ResourceType.AGENT,
permBits: 15, // VIEW | EDIT | DELETE | SHARE
});
}, 20000);
afterAll(async () => {
@@ -486,51 +468,6 @@ describe('models/Agent', () => {
expect(agentAfterDelete).toBeNull();
});
test('should remove ACL entries when deleting an agent', async () => {
const agentId = `agent_${uuidv4()}`;
const authorId = new mongoose.Types.ObjectId();
// Create agent
const agent = await createAgent({
id: agentId,
name: 'Agent With Permissions',
provider: 'test',
model: 'test-model',
author: authorId,
});
// Grant permissions (simulating sharing)
await permissionService.grantPermission({
principalType: PrincipalType.USER,
principalId: authorId,
resourceType: ResourceType.AGENT,
resourceId: agent._id,
accessRoleId: AccessRoleIds.AGENT_OWNER,
grantedBy: authorId,
});
// Verify ACL entry exists
const aclEntriesBefore = await AclEntry.find({
resourceType: ResourceType.AGENT,
resourceId: agent._id,
});
expect(aclEntriesBefore).toHaveLength(1);
// Delete the agent
await deleteAgent({ id: agentId });
// Verify agent is deleted
const agentAfterDelete = await getAgent({ id: agentId });
expect(agentAfterDelete).toBeNull();
// Verify ACL entries are removed
const aclEntriesAfter = await AclEntry.find({
resourceType: ResourceType.AGENT,
resourceId: agent._id,
});
expect(aclEntriesAfter).toHaveLength(0);
});
test('should list agents by author', async () => {
const authorId = new mongoose.Types.ObjectId();
const otherAuthorId = new mongoose.Types.ObjectId();
@@ -942,31 +879,45 @@ describe('models/Agent', () => {
expect(emptyParamsAgent.model_parameters).toEqual({});
});
test('should not create new version for duplicate updates', async () => {
const authorId = new mongoose.Types.ObjectId();
const testCases = generateVersionTestCases();
test('should detect duplicate versions and reject updates', async () => {
const originalConsoleError = console.error;
console.error = jest.fn();
for (const testCase of testCases) {
const testAgentId = `agent_${uuidv4()}`;
try {
const authorId = new mongoose.Types.ObjectId();
const testCases = generateVersionTestCases();
await createAgent({
id: testAgentId,
provider: 'test',
model: 'test-model',
author: authorId,
...testCase.initial,
});
for (const testCase of testCases) {
const testAgentId = `agent_${uuidv4()}`;
const updatedAgent = await updateAgent({ id: testAgentId }, testCase.update);
expect(updatedAgent.versions).toHaveLength(2); // No new version created
await createAgent({
id: testAgentId,
provider: 'test',
model: 'test-model',
author: authorId,
...testCase.initial,
});
// Update with duplicate data should succeed but not create a new version
const duplicateUpdate = await updateAgent({ id: testAgentId }, testCase.duplicate);
await updateAgent({ id: testAgentId }, testCase.update);
expect(duplicateUpdate.versions).toHaveLength(2); // No new version created
let error;
try {
await updateAgent({ id: testAgentId }, testCase.duplicate);
} catch (e) {
error = e;
}
const agent = await getAgent({ id: testAgentId });
expect(agent.versions).toHaveLength(2);
expect(error).toBeDefined();
expect(error.message).toContain('Duplicate version');
expect(error.statusCode).toBe(409);
expect(error.details).toBeDefined();
expect(error.details.duplicateVersion).toBeDefined();
const agent = await getAgent({ id: testAgentId });
expect(agent.versions).toHaveLength(2);
}
} finally {
console.error = originalConsoleError;
}
});
@@ -1142,13 +1093,20 @@ describe('models/Agent', () => {
expect(secondUpdate.versions).toHaveLength(3);
// Update without forceVersion and no changes should not create a version
const duplicateUpdate = await updateAgent(
{ id: agentId },
{ tools: ['listEvents_action_test.com', 'createEvent_action_test.com'] },
{ updatingUserId: authorId.toString(), forceVersion: false },
);
let error;
try {
await updateAgent(
{ id: agentId },
{ tools: ['listEvents_action_test.com', 'createEvent_action_test.com'] },
{ updatingUserId: authorId.toString(), forceVersion: false },
);
} catch (e) {
error = e;
}
expect(duplicateUpdate.versions).toHaveLength(3); // No new version created
expect(error).toBeDefined();
expect(error.message).toContain('Duplicate version');
expect(error.statusCode).toBe(409);
});
test('should handle isDuplicateVersion with arrays containing null/undefined values', async () => {
@@ -1300,335 +1258,6 @@ describe('models/Agent', () => {
expect(secondUpdate.versions).toHaveLength(3);
});
test('should detect changes in support_contact fields', async () => {
const agentId = `agent_${uuidv4()}`;
const authorId = new mongoose.Types.ObjectId();
// Create agent with initial support_contact
await createAgent({
id: agentId,
name: 'Agent with Support Contact',
provider: 'test',
model: 'test-model',
author: authorId,
support_contact: {
name: 'Initial Support',
email: 'initial@support.com',
},
});
// Update support_contact name only
const firstUpdate = await updateAgent(
{ id: agentId },
{
support_contact: {
name: 'Updated Support',
email: 'initial@support.com',
},
},
);
expect(firstUpdate.versions).toHaveLength(2);
expect(firstUpdate.support_contact.name).toBe('Updated Support');
expect(firstUpdate.support_contact.email).toBe('initial@support.com');
// Update support_contact email only
const secondUpdate = await updateAgent(
{ id: agentId },
{
support_contact: {
name: 'Updated Support',
email: 'updated@support.com',
},
},
);
expect(secondUpdate.versions).toHaveLength(3);
expect(secondUpdate.support_contact.email).toBe('updated@support.com');
// Try to update with same support_contact - should be detected as duplicate but return successfully
const duplicateUpdate = await updateAgent(
{ id: agentId },
{
support_contact: {
name: 'Updated Support',
email: 'updated@support.com',
},
},
);
// Should not create a new version
expect(duplicateUpdate.versions).toHaveLength(3);
expect(duplicateUpdate.version).toBe(3);
expect(duplicateUpdate.support_contact.email).toBe('updated@support.com');
});
test('should handle support_contact from empty to populated', async () => {
const agentId = `agent_${uuidv4()}`;
const authorId = new mongoose.Types.ObjectId();
// Create agent without support_contact
const agent = await createAgent({
id: agentId,
name: 'Agent without Support',
provider: 'test',
model: 'test-model',
author: authorId,
});
// Verify support_contact is undefined since it wasn't provided
expect(agent.support_contact).toBeUndefined();
// Update to add support_contact
const updated = await updateAgent(
{ id: agentId },
{
support_contact: {
name: 'New Support Team',
email: 'support@example.com',
},
},
);
expect(updated.versions).toHaveLength(2);
expect(updated.support_contact.name).toBe('New Support Team');
expect(updated.support_contact.email).toBe('support@example.com');
});
test('should handle support_contact edge cases in isDuplicateVersion', async () => {
const agentId = `agent_${uuidv4()}`;
const authorId = new mongoose.Types.ObjectId();
// Create agent with support_contact
await createAgent({
id: agentId,
name: 'Edge Case Agent',
provider: 'test',
model: 'test-model',
author: authorId,
support_contact: {
name: 'Support',
email: 'support@test.com',
},
});
// Update to empty support_contact
const emptyUpdate = await updateAgent(
{ id: agentId },
{
support_contact: {},
},
);
expect(emptyUpdate.versions).toHaveLength(2);
expect(emptyUpdate.support_contact).toEqual({});
// Update back to populated support_contact
const repopulated = await updateAgent(
{ id: agentId },
{
support_contact: {
name: 'Support',
email: 'support@test.com',
},
},
);
expect(repopulated.versions).toHaveLength(3);
// Verify all versions have correct support_contact
const finalAgent = await getAgent({ id: agentId });
expect(finalAgent.versions[0].support_contact).toEqual({
name: 'Support',
email: 'support@test.com',
});
expect(finalAgent.versions[1].support_contact).toEqual({});
expect(finalAgent.versions[2].support_contact).toEqual({
name: 'Support',
email: 'support@test.com',
});
});
test('should preserve support_contact in version history', async () => {
const agentId = `agent_${uuidv4()}`;
const authorId = new mongoose.Types.ObjectId();
// Create agent
await createAgent({
id: agentId,
name: 'Version History Test',
provider: 'test',
model: 'test-model',
author: authorId,
support_contact: {
name: 'Initial Contact',
email: 'initial@test.com',
},
});
// Multiple updates with different support_contact values
await updateAgent(
{ id: agentId },
{
support_contact: {
name: 'Second Contact',
email: 'second@test.com',
},
},
);
await updateAgent(
{ id: agentId },
{
support_contact: {
name: 'Third Contact',
email: 'third@test.com',
},
},
);
const finalAgent = await getAgent({ id: agentId });
// Verify version history
expect(finalAgent.versions).toHaveLength(3);
expect(finalAgent.versions[0].support_contact).toEqual({
name: 'Initial Contact',
email: 'initial@test.com',
});
expect(finalAgent.versions[1].support_contact).toEqual({
name: 'Second Contact',
email: 'second@test.com',
});
expect(finalAgent.versions[2].support_contact).toEqual({
name: 'Third Contact',
email: 'third@test.com',
});
// Current state should match last version
expect(finalAgent.support_contact).toEqual({
name: 'Third Contact',
email: 'third@test.com',
});
});
test('should handle partial support_contact updates', async () => {
const agentId = `agent_${uuidv4()}`;
const authorId = new mongoose.Types.ObjectId();
// Create agent with full support_contact
await createAgent({
id: agentId,
name: 'Partial Update Test',
provider: 'test',
model: 'test-model',
author: authorId,
support_contact: {
name: 'Original Name',
email: 'original@email.com',
},
});
// MongoDB's findOneAndUpdate will replace the entire support_contact object
// So we need to verify that partial updates still work correctly
const updated = await updateAgent(
{ id: agentId },
{
support_contact: {
name: 'New Name',
email: '', // Empty email
},
},
);
expect(updated.versions).toHaveLength(2);
expect(updated.support_contact.name).toBe('New Name');
expect(updated.support_contact.email).toBe('');
// Verify isDuplicateVersion works with partial changes - should return successfully without creating new version
const duplicateUpdate = await updateAgent(
{ id: agentId },
{
support_contact: {
name: 'New Name',
email: '',
},
},
);
// Should not create a new version since content is the same
expect(duplicateUpdate.versions).toHaveLength(2);
expect(duplicateUpdate.version).toBe(2);
expect(duplicateUpdate.support_contact.name).toBe('New Name');
expect(duplicateUpdate.support_contact.email).toBe('');
});
// Edge Cases
describe.each([
{
operation: 'add',
name: 'empty file_id',
needsAgent: true,
params: { tool_resource: 'file_search', file_id: '' },
shouldResolve: true,
},
{
operation: 'add',
name: 'non-existent agent',
needsAgent: false,
params: { tool_resource: 'file_search', file_id: 'file123' },
shouldResolve: false,
error: 'Agent not found for adding resource file',
},
])('addAgentResourceFile with $name', ({ needsAgent, params, shouldResolve, error }) => {
test(`should ${shouldResolve ? 'resolve' : 'reject'}`, async () => {
const agent = needsAgent ? await createBasicAgent() : null;
const agent_id = needsAgent ? agent.id : `agent_${uuidv4()}`;
if (shouldResolve) {
await expect(addAgentResourceFile({ agent_id, ...params })).resolves.toBeDefined();
} else {
await expect(addAgentResourceFile({ agent_id, ...params })).rejects.toThrow(error);
}
});
});
describe.each([
{
name: 'empty files array',
files: [],
needsAgent: true,
shouldResolve: true,
},
{
name: 'non-existent tool_resource',
files: [{ tool_resource: 'non_existent_tool', file_id: 'file123' }],
needsAgent: true,
shouldResolve: true,
},
{
name: 'non-existent agent',
files: [{ tool_resource: 'file_search', file_id: 'file123' }],
needsAgent: false,
shouldResolve: false,
error: 'Agent not found for removing resource files',
},
])('removeAgentResourceFiles with $name', ({ files, needsAgent, shouldResolve, error }) => {
test(`should ${shouldResolve ? 'resolve' : 'reject'}`, async () => {
const agent = needsAgent ? await createBasicAgent() : null;
const agent_id = needsAgent ? agent.id : `agent_${uuidv4()}`;
if (shouldResolve) {
const result = await removeAgentResourceFiles({ agent_id, files });
expect(result).toBeDefined();
if (agent) {
expect(result.id).toBe(agent.id);
}
} else {
await expect(removeAgentResourceFiles({ agent_id, files })).rejects.toThrow(error);
}
});
});
describe('Edge Cases', () => {
test('should handle extremely large version history', async () => {
const agentId = `agent_${uuidv4()}`;
@@ -2004,7 +1633,7 @@ describe('models/Agent', () => {
expect(result.version).toBe(1);
});
test('should return agent even when user is not author (permissions checked at route level)', async () => {
test('should return null when user is not author and agent has no projectIds', async () => {
const authorId = new mongoose.Types.ObjectId();
const userId = new mongoose.Types.ObjectId();
const agentId = `agent_${uuidv4()}`;
@@ -2025,11 +1654,7 @@ describe('models/Agent', () => {
model_parameters: { model: 'gpt-4' },
});
// With the new permission system, loadAgent returns the agent regardless of permissions
// Permission checks are handled at the route level via middleware
expect(result).toBeTruthy();
expect(result.id).toBe(agentId);
expect(result.name).toBe('Test Agent');
expect(result).toBeFalsy();
});
test('should handle ephemeral agent with no MCP servers', async () => {
@@ -2137,7 +1762,7 @@ describe('models/Agent', () => {
}
});
test('should return agent from different project (permissions checked at route level)', async () => {
test('should handle loadAgent with agent from different project', async () => {
const authorId = new mongoose.Types.ObjectId();
const userId = new mongoose.Types.ObjectId();
const agentId = `agent_${uuidv4()}`;
@@ -2160,11 +1785,7 @@ describe('models/Agent', () => {
model_parameters: { model: 'gpt-4' },
});
// With the new permission system, loadAgent returns the agent regardless of permissions
// Permission checks are handled at the route level via middleware
expect(result).toBeTruthy();
expect(result.id).toBe(agentId);
expect(result.name).toBe('Project Agent');
expect(result).toBeFalsy();
});
});
});
@@ -2779,18 +2400,11 @@ describe('models/Agent', () => {
agent_ids: ['agent1', 'agent2'],
});
const updatedAgent = await updateAgent(
{ id: agentId },
{ agent_ids: ['agent1', 'agent2', 'agent3'] },
);
expect(updatedAgent.versions).toHaveLength(2);
await updateAgent({ id: agentId }, { agent_ids: ['agent1', 'agent2', 'agent3'] });
// Update with same agent_ids should succeed but not create a new version
const duplicateUpdate = await updateAgent(
{ id: agentId },
{ agent_ids: ['agent1', 'agent2', 'agent3'] },
);
expect(duplicateUpdate.versions).toHaveLength(2); // No new version created
await expect(
updateAgent({ id: agentId }, { agent_ids: ['agent1', 'agent2', 'agent3'] }),
).rejects.toThrow('Duplicate version');
});
test('should handle agent_ids field alongside other fields', async () => {
@@ -2929,10 +2543,9 @@ describe('models/Agent', () => {
expect(updated.versions).toHaveLength(2);
expect(updated.agent_ids).toEqual([]);
// Update with same empty agent_ids should succeed but not create a new version
const duplicateUpdate = await updateAgent({ id: agentId }, { agent_ids: [] });
expect(duplicateUpdate.versions).toHaveLength(2); // No new version created
expect(duplicateUpdate.agent_ids).toEqual([]);
await expect(updateAgent({ id: agentId }, { agent_ids: [] })).rejects.toThrow(
'Duplicate version',
);
});
test('should handle agent without agent_ids field', async () => {
@@ -2957,299 +2570,6 @@ describe('models/Agent', () => {
});
});
describe('Support Contact Field', () => {
let mongoServer;
beforeAll(async () => {
mongoServer = await MongoMemoryServer.create();
const mongoUri = mongoServer.getUri();
Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
await mongoose.connect(mongoUri);
}, 20000);
afterAll(async () => {
await mongoose.disconnect();
await mongoServer.stop();
});
beforeEach(async () => {
await Agent.deleteMany({});
});
it('should not create subdocument with ObjectId for support_contact', async () => {
const userId = new mongoose.Types.ObjectId();
const agentData = {
id: 'agent_test_support',
name: 'Test Agent',
provider: 'openai',
model: 'gpt-4',
author: userId,
support_contact: {
name: 'Support Team',
email: 'support@example.com',
},
};
// Create agent
const agent = await createAgent(agentData);
// Verify support_contact is stored correctly
expect(agent.support_contact).toBeDefined();
expect(agent.support_contact.name).toBe('Support Team');
expect(agent.support_contact.email).toBe('support@example.com');
// Verify no _id field is created in support_contact
expect(agent.support_contact._id).toBeUndefined();
// Fetch from database to double-check
const dbAgent = await Agent.findOne({ id: agentData.id });
expect(dbAgent.support_contact).toBeDefined();
expect(dbAgent.support_contact.name).toBe('Support Team');
expect(dbAgent.support_contact.email).toBe('support@example.com');
expect(dbAgent.support_contact._id).toBeUndefined();
});
it('should handle empty support_contact correctly', async () => {
const userId = new mongoose.Types.ObjectId();
const agentData = {
id: 'agent_test_empty_support',
name: 'Test Agent',
provider: 'openai',
model: 'gpt-4',
author: userId,
support_contact: {},
};
const agent = await createAgent(agentData);
// Verify empty support_contact is stored as empty object
expect(agent.support_contact).toEqual({});
expect(agent.support_contact._id).toBeUndefined();
});
it('should handle missing support_contact correctly', async () => {
const userId = new mongoose.Types.ObjectId();
const agentData = {
id: 'agent_test_no_support',
name: 'Test Agent',
provider: 'openai',
model: 'gpt-4',
author: userId,
};
const agent = await createAgent(agentData);
// Verify support_contact is undefined when not provided
expect(agent.support_contact).toBeUndefined();
});
describe('getListAgentsByAccess - Security Tests', () => {
let userA, userB;
let agentA1, agentA2, agentA3;
beforeEach(async () => {
Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
await Agent.deleteMany({});
await AclEntry.deleteMany({});
// Create two users
userA = new mongoose.Types.ObjectId();
userB = new mongoose.Types.ObjectId();
// Create agents for user A
agentA1 = await createAgent({
id: `agent_${uuidv4().slice(0, 12)}`,
name: 'Agent A1',
description: 'User A agent 1',
provider: 'openai',
model: 'gpt-4',
author: userA,
});
agentA2 = await createAgent({
id: `agent_${uuidv4().slice(0, 12)}`,
name: 'Agent A2',
description: 'User A agent 2',
provider: 'openai',
model: 'gpt-4',
author: userA,
});
agentA3 = await createAgent({
id: `agent_${uuidv4().slice(0, 12)}`,
name: 'Agent A3',
description: 'User A agent 3',
provider: 'openai',
model: 'gpt-4',
author: userA,
});
});
test('should return empty list when user has no accessible agents (empty accessibleIds)', async () => {
// User B has no agents and no shared agents
const result = await getListAgentsByAccess({
accessibleIds: [],
otherParams: {},
});
expect(result.data).toHaveLength(0);
expect(result.has_more).toBe(false);
expect(result.first_id).toBeNull();
expect(result.last_id).toBeNull();
});
test('should not return other users agents when accessibleIds is empty', async () => {
// User B trying to list agents with empty accessibleIds should not see User A's agents
const result = await getListAgentsByAccess({
accessibleIds: [],
otherParams: { author: userB },
});
expect(result.data).toHaveLength(0);
expect(result.has_more).toBe(false);
});
test('should only return agents in accessibleIds list', async () => {
// Give User B access to only one of User A's agents
const accessibleIds = [agentA1._id];
const result = await getListAgentsByAccess({
accessibleIds,
otherParams: {},
});
expect(result.data).toHaveLength(1);
expect(result.data[0].id).toBe(agentA1.id);
expect(result.data[0].name).toBe('Agent A1');
});
test('should return multiple accessible agents when provided', async () => {
// Give User B access to two of User A's agents
const accessibleIds = [agentA1._id, agentA3._id];
const result = await getListAgentsByAccess({
accessibleIds,
otherParams: {},
});
expect(result.data).toHaveLength(2);
const returnedIds = result.data.map((agent) => agent.id);
expect(returnedIds).toContain(agentA1.id);
expect(returnedIds).toContain(agentA3.id);
expect(returnedIds).not.toContain(agentA2.id);
});
test('should respect other query parameters while enforcing accessibleIds', async () => {
// Give access to all agents but filter by name
const accessibleIds = [agentA1._id, agentA2._id, agentA3._id];
const result = await getListAgentsByAccess({
accessibleIds,
otherParams: { name: 'Agent A2' },
});
expect(result.data).toHaveLength(1);
expect(result.data[0].id).toBe(agentA2.id);
});
test('should handle pagination correctly with accessibleIds filter', async () => {
// Create more agents
const moreAgents = [];
for (let i = 4; i <= 10; i++) {
const agent = await createAgent({
id: `agent_${uuidv4().slice(0, 12)}`,
name: `Agent A${i}`,
description: `User A agent ${i}`,
provider: 'openai',
model: 'gpt-4',
author: userA,
});
moreAgents.push(agent);
}
// Give access to all agents
const allAgentIds = [agentA1, agentA2, agentA3, ...moreAgents].map((a) => a._id);
// First page
const page1 = await getListAgentsByAccess({
accessibleIds: allAgentIds,
otherParams: {},
limit: 5,
});
expect(page1.data).toHaveLength(5);
expect(page1.has_more).toBe(true);
expect(page1.after).toBeTruthy();
// Second page
const page2 = await getListAgentsByAccess({
accessibleIds: allAgentIds,
otherParams: {},
limit: 5,
after: page1.after,
});
expect(page2.data).toHaveLength(5);
expect(page2.has_more).toBe(false);
// Verify no overlap between pages
const page1Ids = page1.data.map((a) => a.id);
const page2Ids = page2.data.map((a) => a.id);
const intersection = page1Ids.filter((id) => page2Ids.includes(id));
expect(intersection).toHaveLength(0);
});
test('should return empty list when accessibleIds contains non-existent IDs', async () => {
// Try with non-existent agent IDs
const fakeIds = [new mongoose.Types.ObjectId(), new mongoose.Types.ObjectId()];
const result = await getListAgentsByAccess({
accessibleIds: fakeIds,
otherParams: {},
});
expect(result.data).toHaveLength(0);
expect(result.has_more).toBe(false);
});
test('should handle undefined accessibleIds as empty array', async () => {
// When accessibleIds is undefined, it should be treated as empty array
const result = await getListAgentsByAccess({
accessibleIds: undefined,
otherParams: {},
});
expect(result.data).toHaveLength(0);
expect(result.has_more).toBe(false);
});
test('should combine accessibleIds with author filter correctly', async () => {
// Create an agent for User B
const agentB1 = await createAgent({
id: `agent_${uuidv4().slice(0, 12)}`,
name: 'Agent B1',
description: 'User B agent 1',
provider: 'openai',
model: 'gpt-4',
author: userB,
});
// Give User B access to one of User A's agents
const accessibleIds = [agentA1._id, agentB1._id];
// Filter by author should further restrict the results
const result = await getListAgentsByAccess({
accessibleIds,
otherParams: { author: userB },
});
expect(result.data).toHaveLength(1);
expect(result.data[0].id).toBe(agentB1.id);
expect(result.data[0].author).toBe(userB.toString());
});
});
});
function createBasicAgent(overrides = {}) {
const defaults = {
id: `agent_${uuidv4()}`,

View File

@@ -1,5 +1,7 @@
const { logger } = require('@librechat/data-schemas');
const { EToolResources, FileContext } = require('librechat-data-provider');
const { EToolResources, FileContext, Constants } = require('librechat-data-provider');
const { getProjectByName } = require('./Project');
const { getAgent } = require('./Agent');
const { File } = require('~/db/models');
/**
@@ -12,17 +14,124 @@ const findFileById = async (file_id, options = {}) => {
return await File.findOne({ file_id, ...options }).lean();
};
/**
* Checks if a user has access to multiple files through a shared agent (batch operation)
* @param {string} userId - The user ID to check access for
* @param {string[]} fileIds - Array of file IDs to check
* @param {string} agentId - The agent ID that might grant access
* @returns {Promise<Map<string, boolean>>} Map of fileId to access status
*/
const hasAccessToFilesViaAgent = async (userId, fileIds, agentId, checkCollaborative = true) => {
const accessMap = new Map();
// Initialize all files as no access
fileIds.forEach((fileId) => accessMap.set(fileId, false));
try {
const agent = await getAgent({ id: agentId });
if (!agent) {
return accessMap;
}
// Check if user is the author - if so, grant access to all files
if (agent.author.toString() === userId) {
fileIds.forEach((fileId) => accessMap.set(fileId, true));
return accessMap;
}
// Check if agent is shared with the user via projects
if (!agent.projectIds || agent.projectIds.length === 0) {
return accessMap;
}
// Check if agent is in global project
const globalProject = await getProjectByName(Constants.GLOBAL_PROJECT_NAME, '_id');
if (
!globalProject ||
!agent.projectIds.some((pid) => pid.toString() === globalProject._id.toString())
) {
return accessMap;
}
// Agent is globally shared - check if it's collaborative
if (checkCollaborative && !agent.isCollaborative) {
return accessMap;
}
// Check which files are actually attached
const attachedFileIds = new Set();
if (agent.tool_resources) {
for (const [_resourceType, resource] of Object.entries(agent.tool_resources)) {
if (resource?.file_ids && Array.isArray(resource.file_ids)) {
resource.file_ids.forEach((fileId) => attachedFileIds.add(fileId));
}
}
}
// Grant access only to files that are attached to this agent
fileIds.forEach((fileId) => {
if (attachedFileIds.has(fileId)) {
accessMap.set(fileId, true);
}
});
return accessMap;
} catch (error) {
logger.error('[hasAccessToFilesViaAgent] Error checking file access:', error);
return accessMap;
}
};
/**
* Retrieves files matching a given filter, sorted by the most recently updated.
* @param {Object} filter - The filter criteria to apply.
* @param {Object} [_sortOptions] - Optional sort parameters.
* @param {Object|String} [selectFields={ text: 0 }] - Fields to include/exclude in the query results.
* Default excludes the 'text' field.
* @param {Object} [options] - Additional options
* @param {string} [options.userId] - User ID for access control
* @param {string} [options.agentId] - Agent ID that might grant access to files
* @returns {Promise<Array<MongoFile>>} A promise that resolves to an array of file documents.
*/
const getFiles = async (filter, _sortOptions, selectFields = { text: 0 }) => {
const getFiles = async (filter, _sortOptions, selectFields = { text: 0 }, options = {}) => {
const sortOptions = { updatedAt: -1, ..._sortOptions };
return await File.find(filter).select(selectFields).sort(sortOptions).lean();
const files = await File.find(filter).select(selectFields).sort(sortOptions).lean();
// If userId and agentId are provided, filter files based on access
if (options.userId && options.agentId) {
// Collect file IDs that need access check
const filesToCheck = [];
const ownedFiles = [];
for (const file of files) {
if (file.user && file.user.toString() === options.userId) {
ownedFiles.push(file);
} else {
filesToCheck.push(file);
}
}
if (filesToCheck.length === 0) {
return ownedFiles;
}
// Batch check access for all non-owned files
const fileIds = filesToCheck.map((f) => f.file_id);
const accessMap = await hasAccessToFilesViaAgent(
options.userId,
fileIds,
options.agentId,
false,
);
// Filter files based on access
const accessibleFiles = filesToCheck.filter((file) => accessMap.get(file.file_id));
return [...ownedFiles, ...accessibleFiles];
}
return files;
};
/**
@@ -176,4 +285,5 @@ module.exports = {
deleteFiles,
deleteFileByFilter,
batchUpdateFiles,
hasAccessToFilesViaAgent,
};

View File

@@ -1,23 +1,17 @@
const mongoose = require('mongoose');
const { v4: uuidv4 } = require('uuid');
const { createModels } = require('@librechat/data-schemas');
const { fileSchema } = require('@librechat/data-schemas');
const { agentSchema } = require('@librechat/data-schemas');
const { projectSchema } = require('@librechat/data-schemas');
const { MongoMemoryServer } = require('mongodb-memory-server');
const {
SystemRoles,
ResourceType,
AccessRoleIds,
PrincipalType,
} = require('librechat-data-provider');
const { grantPermission } = require('~/server/services/PermissionService');
const { GLOBAL_PROJECT_NAME } = require('librechat-data-provider').Constants;
const { getFiles, createFile } = require('./File');
const { seedDefaultRoles } = require('~/models');
const { getProjectByName } = require('./Project');
const { createAgent } = require('./Agent');
let File;
let Agent;
let AclEntry;
let User;
let modelsToCleanup = [];
let Project;
describe('File Access Control', () => {
let mongoServer;
@@ -25,41 +19,13 @@ describe('File Access Control', () => {
beforeAll(async () => {
mongoServer = await MongoMemoryServer.create();
const mongoUri = mongoServer.getUri();
File = mongoose.models.File || mongoose.model('File', fileSchema);
Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
Project = mongoose.models.Project || mongoose.model('Project', projectSchema);
await mongoose.connect(mongoUri);
// Initialize all models
const models = createModels(mongoose);
// Track which models we're adding
modelsToCleanup = Object.keys(models);
// Register models on mongoose.models so methods can access them
const dbModels = require('~/db/models');
Object.assign(mongoose.models, dbModels);
File = dbModels.File;
Agent = dbModels.Agent;
AclEntry = dbModels.AclEntry;
User = dbModels.User;
// Seed default roles
await seedDefaultRoles();
});
afterAll(async () => {
// Clean up all collections before disconnecting
const collections = mongoose.connection.collections;
for (const key in collections) {
await collections[key].deleteMany({});
}
// Clear only the models we added
for (const modelName of modelsToCleanup) {
if (mongoose.models[modelName]) {
delete mongoose.models[modelName];
}
}
await mongoose.disconnect();
await mongoServer.stop();
});
@@ -67,33 +33,16 @@ describe('File Access Control', () => {
beforeEach(async () => {
await File.deleteMany({});
await Agent.deleteMany({});
await AclEntry.deleteMany({});
await User.deleteMany({});
    // Don't delete AccessRole entries, as they are seeded defaults needed for the tests
await Project.deleteMany({});
});
describe('hasAccessToFilesViaAgent', () => {
it('should efficiently check access for multiple files at once', async () => {
const userId = new mongoose.Types.ObjectId();
const authorId = new mongoose.Types.ObjectId();
const userId = new mongoose.Types.ObjectId().toString();
const authorId = new mongoose.Types.ObjectId().toString();
const agentId = uuidv4();
const fileIds = [uuidv4(), uuidv4(), uuidv4(), uuidv4()];
// Create users
await User.create({
_id: userId,
email: 'user@example.com',
emailVerified: true,
provider: 'local',
});
await User.create({
_id: authorId,
email: 'author@example.com',
emailVerified: true,
provider: 'local',
});
// Create files
for (const fileId of fileIds) {
await createFile({
@@ -105,12 +54,13 @@ describe('File Access Control', () => {
}
// Create agent with only first two files attached
const agent = await createAgent({
await createAgent({
id: agentId,
name: 'Test Agent',
author: authorId,
model: 'gpt-4',
provider: 'openai',
isCollaborative: true,
tool_resources: {
file_search: {
file_ids: [fileIds[0], fileIds[1]],
@@ -118,24 +68,15 @@ describe('File Access Control', () => {
},
});
// Grant EDIT permission to user on the agent
await grantPermission({
principalType: PrincipalType.USER,
principalId: userId,
resourceType: ResourceType.AGENT,
resourceId: agent._id,
accessRoleId: AccessRoleIds.AGENT_EDITOR,
grantedBy: authorId,
});
// Get or create global project
const globalProject = await getProjectByName(GLOBAL_PROJECT_NAME, '_id');
// Share agent globally
await Agent.updateOne({ id: agentId }, { $push: { projectIds: globalProject._id } });
// Check access for all files
const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
const accessMap = await hasAccessToFilesViaAgent({
userId: userId,
role: SystemRoles.USER,
fileIds,
agentId: agent.id, // Use agent.id which is the custom UUID
});
const { hasAccessToFilesViaAgent } = require('./File');
const accessMap = await hasAccessToFilesViaAgent(userId, fileIds, agentId);
// Should have access only to the first two files
expect(accessMap.get(fileIds[0])).toBe(true);
@@ -145,18 +86,10 @@ describe('File Access Control', () => {
});
it('should grant access to all files when user is the agent author', async () => {
const authorId = new mongoose.Types.ObjectId();
const authorId = new mongoose.Types.ObjectId().toString();
const agentId = uuidv4();
const fileIds = [uuidv4(), uuidv4(), uuidv4()];
// Create author user
await User.create({
_id: authorId,
email: 'author@example.com',
emailVerified: true,
provider: 'local',
});
// Create agent
await createAgent({
id: agentId,
@@ -172,13 +105,8 @@ describe('File Access Control', () => {
});
// Check access as the author
const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
const accessMap = await hasAccessToFilesViaAgent({
userId: authorId,
role: SystemRoles.USER,
fileIds,
agentId,
});
const { hasAccessToFilesViaAgent } = require('./File');
const accessMap = await hasAccessToFilesViaAgent(authorId, fileIds, agentId);
// Author should have access to all files
expect(accessMap.get(fileIds[0])).toBe(true);
@@ -187,58 +115,31 @@ describe('File Access Control', () => {
});
it('should handle non-existent agent gracefully', async () => {
const userId = new mongoose.Types.ObjectId();
const userId = new mongoose.Types.ObjectId().toString();
const fileIds = [uuidv4(), uuidv4()];
// Create user
await User.create({
_id: userId,
email: 'user@example.com',
emailVerified: true,
provider: 'local',
});
const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
const accessMap = await hasAccessToFilesViaAgent({
userId: userId,
role: SystemRoles.USER,
fileIds,
agentId: 'non-existent-agent',
});
const { hasAccessToFilesViaAgent } = require('./File');
const accessMap = await hasAccessToFilesViaAgent(userId, fileIds, 'non-existent-agent');
// Should have no access to any files
expect(accessMap.get(fileIds[0])).toBe(false);
expect(accessMap.get(fileIds[1])).toBe(false);
});
it('should deny access when user only has VIEW permission', async () => {
const userId = new mongoose.Types.ObjectId();
const authorId = new mongoose.Types.ObjectId();
it('should deny access when agent is not collaborative', async () => {
const userId = new mongoose.Types.ObjectId().toString();
const authorId = new mongoose.Types.ObjectId().toString();
const agentId = uuidv4();
const fileIds = [uuidv4(), uuidv4()];
// Create users
await User.create({
_id: userId,
email: 'user@example.com',
emailVerified: true,
provider: 'local',
});
await User.create({
_id: authorId,
email: 'author@example.com',
emailVerified: true,
provider: 'local',
});
// Create agent with files
const agent = await createAgent({
// Create agent with files but isCollaborative: false
await createAgent({
id: agentId,
name: 'View-Only Agent',
name: 'Non-Collaborative Agent',
author: authorId,
model: 'gpt-4',
provider: 'openai',
isCollaborative: false,
tool_resources: {
file_search: {
file_ids: fileIds,
@@ -246,26 +147,17 @@ describe('File Access Control', () => {
},
});
// Grant only VIEW permission to user on the agent
await grantPermission({
principalType: PrincipalType.USER,
principalId: userId,
resourceType: ResourceType.AGENT,
resourceId: agent._id,
accessRoleId: AccessRoleIds.AGENT_VIEWER,
grantedBy: authorId,
});
// Get or create global project
const globalProject = await getProjectByName(GLOBAL_PROJECT_NAME, '_id');
// Share agent globally
await Agent.updateOne({ id: agentId }, { $push: { projectIds: globalProject._id } });
// Check access for files
const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
const accessMap = await hasAccessToFilesViaAgent({
userId: userId,
role: SystemRoles.USER,
fileIds,
agentId,
});
const { hasAccessToFilesViaAgent } = require('./File');
const accessMap = await hasAccessToFilesViaAgent(userId, fileIds, agentId);
// Should have no access to any files when only VIEW permission
// Should have no access to any files when isCollaborative is false
expect(accessMap.get(fileIds[0])).toBe(false);
expect(accessMap.get(fileIds[1])).toBe(false);
});
@@ -280,28 +172,18 @@ describe('File Access Control', () => {
const sharedFileId = `file_${uuidv4()}`;
const inaccessibleFileId = `file_${uuidv4()}`;
// Create users
await User.create({
_id: userId,
email: 'user@example.com',
emailVerified: true,
provider: 'local',
});
await User.create({
_id: authorId,
email: 'author@example.com',
emailVerified: true,
provider: 'local',
});
// Create/get global project using getProjectByName which will upsert
const globalProject = await getProjectByName(GLOBAL_PROJECT_NAME);
// Create agent with shared file
const agent = await createAgent({
await createAgent({
id: agentId,
name: 'Shared Agent',
provider: 'test',
model: 'test-model',
author: authorId,
projectIds: [globalProject._id],
isCollaborative: true,
tool_resources: {
file_search: {
file_ids: [sharedFileId],
@@ -309,16 +191,6 @@ describe('File Access Control', () => {
},
});
// Grant EDIT permission to user on the agent
await grantPermission({
principalType: PrincipalType.USER,
principalId: userId,
resourceType: ResourceType.AGENT,
resourceId: agent._id,
accessRoleId: AccessRoleIds.AGENT_EDITOR,
grantedBy: authorId,
});
// Create files
await createFile({
file_id: ownedFileId,
@@ -348,22 +220,14 @@ describe('File Access Control', () => {
bytes: 300,
});
// Get all files first
const allFiles = await getFiles(
// Get files with access control
const files = await getFiles(
{ file_id: { $in: [ownedFileId, sharedFileId, inaccessibleFileId] } },
null,
{ text: 0 },
{ userId: userId.toString(), agentId },
);
// Then filter by access control
const { filterFilesByAgentAccess } = require('~/server/services/Files/permissions');
const files = await filterFilesByAgentAccess({
files: allFiles,
userId: userId,
role: SystemRoles.USER,
agentId,
});
expect(files).toHaveLength(2);
expect(files.map((f) => f.file_id)).toContain(ownedFileId);
expect(files.map((f) => f.file_id)).toContain(sharedFileId);
@@ -397,166 +261,4 @@ describe('File Access Control', () => {
expect(files).toHaveLength(2);
});
});
describe('Role-based file permissions', () => {
it('should optimize permission checks when role is provided', async () => {
const userId = new mongoose.Types.ObjectId();
const authorId = new mongoose.Types.ObjectId();
const agentId = uuidv4();
const fileIds = [uuidv4(), uuidv4()];
// Create users
await User.create({
_id: userId,
email: 'user@example.com',
emailVerified: true,
provider: 'local',
role: 'ADMIN', // User has ADMIN role
});
await User.create({
_id: authorId,
email: 'author@example.com',
emailVerified: true,
provider: 'local',
});
// Create files
for (const fileId of fileIds) {
await createFile({
file_id: fileId,
user: authorId,
filename: `${fileId}.txt`,
filepath: `/uploads/${fileId}.txt`,
type: 'text/plain',
bytes: 100,
});
}
// Create agent with files
const agent = await createAgent({
id: agentId,
name: 'Test Agent',
author: authorId,
model: 'gpt-4',
provider: 'openai',
tool_resources: {
file_search: {
file_ids: fileIds,
},
},
});
// Grant permission to ADMIN role
await grantPermission({
principalType: PrincipalType.ROLE,
principalId: 'ADMIN',
resourceType: ResourceType.AGENT,
resourceId: agent._id,
accessRoleId: AccessRoleIds.AGENT_EDITOR,
grantedBy: authorId,
});
// Check access with role provided (should avoid DB query)
const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
const accessMapWithRole = await hasAccessToFilesViaAgent({
userId: userId,
role: 'ADMIN',
fileIds,
agentId: agent.id,
});
// User should have access through their ADMIN role
expect(accessMapWithRole.get(fileIds[0])).toBe(true);
expect(accessMapWithRole.get(fileIds[1])).toBe(true);
// Check access without role (will query DB to get user's role)
const accessMapWithoutRole = await hasAccessToFilesViaAgent({
userId: userId,
fileIds,
agentId: agent.id,
});
// Should have same result
expect(accessMapWithoutRole.get(fileIds[0])).toBe(true);
expect(accessMapWithoutRole.get(fileIds[1])).toBe(true);
});
it('should deny access when user role changes', async () => {
const userId = new mongoose.Types.ObjectId();
const authorId = new mongoose.Types.ObjectId();
const agentId = uuidv4();
const fileId = uuidv4();
// Create users
await User.create({
_id: userId,
email: 'user@example.com',
emailVerified: true,
provider: 'local',
role: 'EDITOR',
});
await User.create({
_id: authorId,
email: 'author@example.com',
emailVerified: true,
provider: 'local',
});
// Create file
await createFile({
file_id: fileId,
user: authorId,
filename: 'test.txt',
filepath: '/uploads/test.txt',
type: 'text/plain',
bytes: 100,
});
// Create agent
const agent = await createAgent({
id: agentId,
name: 'Test Agent',
author: authorId,
model: 'gpt-4',
provider: 'openai',
tool_resources: {
file_search: {
file_ids: [fileId],
},
},
});
// Grant permission to EDITOR role only
await grantPermission({
principalType: PrincipalType.ROLE,
principalId: 'EDITOR',
resourceType: ResourceType.AGENT,
resourceId: agent._id,
accessRoleId: AccessRoleIds.AGENT_EDITOR,
grantedBy: authorId,
});
const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
// Check with EDITOR role - should have access
const accessAsEditor = await hasAccessToFilesViaAgent({
userId: userId,
role: 'EDITOR',
fileIds: [fileId],
agentId: agent.id,
});
expect(accessAsEditor.get(fileId)).toBe(true);
// Simulate role change to USER - should lose access
const accessAsUser = await hasAccessToFilesViaAgent({
userId: userId,
role: SystemRoles.USER,
fileIds: [fileId],
agentId: agent.id,
});
expect(accessAsUser.get(fileId)).toBe(false);
});
});
});

View File

@@ -1,18 +1,12 @@
const { ObjectId } = require('mongodb');
const { logger } = require('@librechat/data-schemas');
const { SystemRoles, SystemCategories, Constants } = require('librechat-data-provider');
const {
Constants,
SystemRoles,
ResourceType,
SystemCategories,
} = require('librechat-data-provider');
const {
removeGroupFromAllProjects,
removeGroupIdsFromProject,
addGroupIdsToProject,
getProjectByName,
addGroupIdsToProject,
removeGroupIdsFromProject,
removeGroupFromAllProjects,
} = require('./Project');
const { removeAllPermissions } = require('~/server/services/PermissionService');
const { PromptGroup, Prompt } = require('~/db/models');
const { escapeRegExp } = require('~/server/utils');
@@ -106,6 +100,10 @@ const getAllPromptGroups = async (req, filter) => {
try {
const { name, ...query } = filter;
if (!query.author) {
throw new Error('Author is required');
}
let searchShared = true;
let searchSharedOnly = false;
if (name) {
@@ -155,6 +153,10 @@ const getPromptGroups = async (req, filter) => {
const validatedPageNumber = Math.max(parseInt(pageNumber, 10), 1);
const validatedPageSize = Math.max(parseInt(pageSize, 10), 1);
if (!query.author) {
throw new Error('Author is required');
}
let searchShared = true;
let searchSharedOnly = false;
if (name) {
@@ -219,16 +221,12 @@ const getPromptGroups = async (req, filter) => {
* @returns {Promise<TDeletePromptGroupResponse>}
*/
const deletePromptGroup = async ({ _id, author, role }) => {
// Build query - with ACL, author is optional
const query = { _id };
const groupQuery = { groupId: new ObjectId(_id) };
// Legacy: Add author filter if provided (backward compatibility)
if (author && role !== SystemRoles.ADMIN) {
query.author = author;
groupQuery.author = author;
const query = { _id, author };
const groupQuery = { groupId: new ObjectId(_id), author };
if (role === SystemRoles.ADMIN) {
delete query.author;
delete groupQuery.author;
}
const response = await PromptGroup.deleteOne(query);
if (!response || response.deletedCount === 0) {
@@ -237,140 +235,13 @@ const deletePromptGroup = async ({ _id, author, role }) => {
await Prompt.deleteMany(groupQuery);
await removeGroupFromAllProjects(_id);
try {
await removeAllPermissions({ resourceType: ResourceType.PROMPTGROUP, resourceId: _id });
} catch (error) {
logger.error('Error removing promptGroup permissions:', error);
}
return { message: 'Prompt group deleted successfully' };
};
/**
* Get prompt groups by accessible IDs with optional cursor-based pagination.
* @param {Object} params - The parameters for getting accessible prompt groups.
* @param {Array} [params.accessibleIds] - Array of prompt group ObjectIds the user has ACL access to.
* @param {Object} [params.otherParams] - Additional query parameters (including author filter).
* @param {number} [params.limit] - Number of prompt groups to return (max 100). If not provided, returns all prompt groups.
 * @param {string} [params.after] - Cursor for pagination; a base64-encoded JSON string containing updatedAt and _id. Returns prompt groups after this cursor.
* @returns {Promise<Object>} A promise that resolves to an object containing the prompt groups data and pagination info.
*/
async function getListPromptGroupsByAccess({
accessibleIds = [],
otherParams = {},
limit = null,
after = null,
}) {
const isPaginated = limit !== null && limit !== undefined;
const normalizedLimit = isPaginated ? Math.min(Math.max(1, parseInt(limit) || 20), 100) : null;
// Build base query combining ACL accessible prompt groups with other filters
const baseQuery = { ...otherParams, _id: { $in: accessibleIds } };
// Add cursor condition
if (after) {
try {
const cursor = JSON.parse(Buffer.from(after, 'base64').toString('utf8'));
const { updatedAt, _id } = cursor;
const cursorCondition = {
$or: [
{ updatedAt: { $lt: new Date(updatedAt) } },
{ updatedAt: new Date(updatedAt), _id: { $gt: new ObjectId(_id) } },
],
};
// Merge cursor condition with base query
if (Object.keys(baseQuery).length > 0) {
baseQuery.$and = [{ ...baseQuery }, cursorCondition];
// Remove the original conditions from baseQuery to avoid duplication
Object.keys(baseQuery).forEach((key) => {
if (key !== '$and') delete baseQuery[key];
});
} else {
Object.assign(baseQuery, cursorCondition);
}
} catch (error) {
logger.warn('Invalid cursor:', error.message);
}
}
// Build aggregation pipeline
const pipeline = [{ $match: baseQuery }, { $sort: { updatedAt: -1, _id: 1 } }];
// Only apply limit if pagination is requested
if (isPaginated) {
pipeline.push({ $limit: normalizedLimit + 1 });
}
// Add lookup for production prompt
pipeline.push(
{
$lookup: {
from: 'prompts',
localField: 'productionId',
foreignField: '_id',
as: 'productionPrompt',
},
},
{ $unwind: { path: '$productionPrompt', preserveNullAndEmptyArrays: true } },
{
$project: {
name: 1,
numberOfGenerations: 1,
oneliner: 1,
category: 1,
projectIds: 1,
productionId: 1,
author: 1,
authorName: 1,
createdAt: 1,
updatedAt: 1,
'productionPrompt.prompt': 1,
},
},
);
const promptGroups = await PromptGroup.aggregate(pipeline).exec();
const hasMore = isPaginated ? promptGroups.length > normalizedLimit : false;
const data = (isPaginated ? promptGroups.slice(0, normalizedLimit) : promptGroups).map(
(group) => {
if (group.author) {
group.author = group.author.toString();
}
return group;
},
);
// Generate next cursor only if paginated
let nextCursor = null;
if (isPaginated && hasMore && data.length > 0) {
const lastGroup = promptGroups[normalizedLimit - 1];
nextCursor = Buffer.from(
JSON.stringify({
updatedAt: lastGroup.updatedAt.toISOString(),
_id: lastGroup._id.toString(),
}),
).toString('base64');
}
return {
object: 'list',
data,
first_id: data.length > 0 ? data[0]._id.toString() : null,
last_id: data.length > 0 ? data[data.length - 1]._id.toString() : null,
has_more: hasMore,
after: nextCursor,
};
}
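The `after` cursor used above is simply a base64-encoded JSON snapshot of the last returned group's `updatedAt` and `_id`. A small sketch of the round trip, mirroring the encoding and parsing done inside the function (helper names are illustrative, not part of this diff):

const encodeCursor = (group) =>
  Buffer.from(
    JSON.stringify({ updatedAt: group.updatedAt.toISOString(), _id: group._id.toString() }),
  ).toString('base64');

const decodeCursor = (after) => JSON.parse(Buffer.from(after, 'base64').toString('utf8'));

// A caller passes the `after` value from one page to fetch the next:
// const page2 = await getListPromptGroupsByAccess({ accessibleIds, limit: 20, after: page1.after });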
module.exports = {
getPromptGroups,
deletePromptGroup,
getAllPromptGroups,
getListPromptGroupsByAccess,
/**
* Create a prompt and its respective group
* @param {TCreatePromptRecord} saveData
@@ -559,16 +430,6 @@ module.exports = {
.lean();
if (remainingPrompts.length === 0) {
// Remove all ACL entries for the promptGroup when deleting the last prompt
try {
await removeAllPermissions({
resourceType: ResourceType.PROMPTGROUP,
resourceId: groupId,
});
} catch (error) {
logger.error('Error removing promptGroup permissions:', error);
}
await PromptGroup.deleteOne({ _id: groupId });
await removeGroupFromAllProjects(groupId);

View File

@@ -1,564 +0,0 @@
const mongoose = require('mongoose');
const { ObjectId } = require('mongodb');
const { logger } = require('@librechat/data-schemas');
const { MongoMemoryServer } = require('mongodb-memory-server');
const {
SystemRoles,
ResourceType,
AccessRoleIds,
PrincipalType,
PermissionBits,
} = require('librechat-data-provider');
// Mock the config/connect module to prevent connection attempts during tests
jest.mock('../../config/connect', () => jest.fn().mockResolvedValue(true));
const dbModels = require('~/db/models');
// Disable console for tests
logger.silent = true;
let mongoServer;
let Prompt, PromptGroup, AclEntry, AccessRole, User, Group, Project;
let promptFns, permissionService;
let testUsers, testGroups, testRoles;
beforeAll(async () => {
// Set up MongoDB memory server
mongoServer = await MongoMemoryServer.create();
const mongoUri = mongoServer.getUri();
await mongoose.connect(mongoUri);
// Initialize models
Prompt = dbModels.Prompt;
PromptGroup = dbModels.PromptGroup;
AclEntry = dbModels.AclEntry;
AccessRole = dbModels.AccessRole;
User = dbModels.User;
Group = dbModels.Group;
Project = dbModels.Project;
promptFns = require('~/models/Prompt');
permissionService = require('~/server/services/PermissionService');
// Create test data
await setupTestData();
});
afterAll(async () => {
await mongoose.disconnect();
await mongoServer.stop();
jest.clearAllMocks();
});
async function setupTestData() {
// Create access roles for promptGroups
testRoles = {
viewer: await AccessRole.create({
accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
name: 'Viewer',
description: 'Can view promptGroups',
resourceType: ResourceType.PROMPTGROUP,
permBits: PermissionBits.VIEW,
}),
editor: await AccessRole.create({
accessRoleId: AccessRoleIds.PROMPTGROUP_EDITOR,
name: 'Editor',
description: 'Can view and edit promptGroups',
resourceType: ResourceType.PROMPTGROUP,
permBits: PermissionBits.VIEW | PermissionBits.EDIT,
}),
owner: await AccessRole.create({
accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
name: 'Owner',
description: 'Full control over promptGroups',
resourceType: ResourceType.PROMPTGROUP,
permBits:
PermissionBits.VIEW | PermissionBits.EDIT | PermissionBits.DELETE | PermissionBits.SHARE,
}),
};
// Create test users
testUsers = {
owner: await User.create({
name: 'Prompt Owner',
email: 'owner@example.com',
role: SystemRoles.USER,
}),
editor: await User.create({
name: 'Prompt Editor',
email: 'editor@example.com',
role: SystemRoles.USER,
}),
viewer: await User.create({
name: 'Prompt Viewer',
email: 'viewer@example.com',
role: SystemRoles.USER,
}),
admin: await User.create({
name: 'Admin User',
email: 'admin@example.com',
role: SystemRoles.ADMIN,
}),
noAccess: await User.create({
name: 'No Access User',
email: 'noaccess@example.com',
role: SystemRoles.USER,
}),
};
// Create test groups
testGroups = {
editors: await Group.create({
name: 'Prompt Editors',
description: 'Group with editor access',
}),
viewers: await Group.create({
name: 'Prompt Viewers',
description: 'Group with viewer access',
}),
};
await Project.create({
name: 'Global',
description: 'Global project',
promptGroupIds: [],
});
}
describe('Prompt ACL Permissions', () => {
describe('Creating Prompts with Permissions', () => {
it('should grant owner permissions when creating a prompt', async () => {
// First create a group
const testGroup = await PromptGroup.create({
name: 'Test Group',
category: 'testing',
author: testUsers.owner._id,
authorName: testUsers.owner.name,
productionId: new mongoose.Types.ObjectId(),
});
const promptData = {
prompt: {
prompt: 'Test prompt content',
name: 'Test Prompt',
type: 'text',
groupId: testGroup._id,
},
author: testUsers.owner._id,
};
await promptFns.savePrompt(promptData);
// Manually grant permissions as would happen in the route
await permissionService.grantPermission({
principalType: PrincipalType.USER,
principalId: testUsers.owner._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: testGroup._id,
accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
grantedBy: testUsers.owner._id,
});
// Check ACL entry
const aclEntry = await AclEntry.findOne({
resourceType: ResourceType.PROMPTGROUP,
resourceId: testGroup._id,
principalType: PrincipalType.USER,
principalId: testUsers.owner._id,
});
expect(aclEntry).toBeTruthy();
expect(aclEntry.permBits).toBe(testRoles.owner.permBits);
});
});
describe('Accessing Prompts', () => {
let testPromptGroup;
beforeEach(async () => {
// Create a prompt group
testPromptGroup = await PromptGroup.create({
name: 'Test Group',
author: testUsers.owner._id,
authorName: testUsers.owner.name,
productionId: new ObjectId(),
});
// Create a prompt
await Prompt.create({
prompt: 'Test prompt for access control',
name: 'Access Test Prompt',
author: testUsers.owner._id,
groupId: testPromptGroup._id,
type: 'text',
});
// Grant owner permissions
await permissionService.grantPermission({
principalType: PrincipalType.USER,
principalId: testUsers.owner._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: testPromptGroup._id,
accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
grantedBy: testUsers.owner._id,
});
});
afterEach(async () => {
await Prompt.deleteMany({});
await PromptGroup.deleteMany({});
await AclEntry.deleteMany({});
});
it('owner should have full access to their prompt', async () => {
const hasAccess = await permissionService.checkPermission({
userId: testUsers.owner._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: testPromptGroup._id,
requiredPermission: PermissionBits.VIEW,
});
expect(hasAccess).toBe(true);
const canEdit = await permissionService.checkPermission({
userId: testUsers.owner._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: testPromptGroup._id,
requiredPermission: PermissionBits.EDIT,
});
expect(canEdit).toBe(true);
});
it('user with viewer role should only have view access', async () => {
// Grant viewer permissions
await permissionService.grantPermission({
principalType: PrincipalType.USER,
principalId: testUsers.viewer._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: testPromptGroup._id,
accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
grantedBy: testUsers.owner._id,
});
const canView = await permissionService.checkPermission({
userId: testUsers.viewer._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: testPromptGroup._id,
requiredPermission: PermissionBits.VIEW,
});
const canEdit = await permissionService.checkPermission({
userId: testUsers.viewer._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: testPromptGroup._id,
requiredPermission: PermissionBits.EDIT,
});
expect(canView).toBe(true);
expect(canEdit).toBe(false);
});
it('user without permissions should have no access', async () => {
const hasAccess = await permissionService.checkPermission({
userId: testUsers.noAccess._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: testPromptGroup._id,
requiredPermission: PermissionBits.VIEW,
});
expect(hasAccess).toBe(false);
});
it('admin should have access regardless of permissions', async () => {
// Admin users should work through the normal permission system
// The middleware layer handles the admin bypass, not the permission service
const hasAccess = await permissionService.checkPermission({
userId: testUsers.admin._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: testPromptGroup._id,
requiredPermission: PermissionBits.VIEW,
});
// Without explicit permissions, even an admin won't have access at this layer
expect(hasAccess).toBe(false);
// The actual admin bypass happens in the middleware layer (`canAccessPromptViaGroup`/`canAccessPromptGroupResource`)
// which checks req.user.role === SystemRoles.ADMIN
});
});
describe('Group-based Access', () => {
let testPromptGroup;
beforeEach(async () => {
// Create a prompt group first
testPromptGroup = await PromptGroup.create({
name: 'Group Access Test Group',
author: testUsers.owner._id,
authorName: testUsers.owner.name,
productionId: new ObjectId(),
});
await Prompt.create({
prompt: 'Group access test prompt',
name: 'Group Test',
author: testUsers.owner._id,
groupId: testPromptGroup._id,
type: 'text',
});
// Add users to groups
await User.findByIdAndUpdate(testUsers.editor._id, {
$push: { groups: testGroups.editors._id },
});
await User.findByIdAndUpdate(testUsers.viewer._id, {
$push: { groups: testGroups.viewers._id },
});
});
afterEach(async () => {
await Prompt.deleteMany({});
await AclEntry.deleteMany({});
await User.updateMany({}, { $set: { groups: [] } });
});
it('group members should inherit group permissions', async () => {
// Create a prompt group
const testPromptGroup = await PromptGroup.create({
name: 'Group Test Group',
author: testUsers.owner._id,
authorName: testUsers.owner.name,
productionId: new ObjectId(),
});
const { addUserToGroup } = require('~/models');
await addUserToGroup(testUsers.editor._id, testGroups.editors._id);
const prompt = await promptFns.savePrompt({
author: testUsers.owner._id,
prompt: {
prompt: 'Group test prompt',
name: 'Group Test',
groupId: testPromptGroup._id,
type: 'text',
},
});
// Check if savePrompt returned an error
if (!prompt || !prompt.prompt) {
throw new Error(`Failed to save prompt: ${prompt?.message || 'Unknown error'}`);
}
// Grant edit permissions to the group
await permissionService.grantPermission({
principalType: PrincipalType.GROUP,
principalId: testGroups.editors._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: testPromptGroup._id,
accessRoleId: AccessRoleIds.PROMPTGROUP_EDITOR,
grantedBy: testUsers.owner._id,
});
// Check if group member has access
const hasAccess = await permissionService.checkPermission({
userId: testUsers.editor._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: testPromptGroup._id,
requiredPermission: PermissionBits.EDIT,
});
expect(hasAccess).toBe(true);
// Check that non-member doesn't have access
const nonMemberAccess = await permissionService.checkPermission({
userId: testUsers.viewer._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: testPromptGroup._id,
requiredPermission: PermissionBits.EDIT,
});
expect(nonMemberAccess).toBe(false);
});
});
describe('Public Access', () => {
let publicPromptGroup, privatePromptGroup;
beforeEach(async () => {
// Create separate prompt groups for public and private access
publicPromptGroup = await PromptGroup.create({
name: 'Public Access Test Group',
author: testUsers.owner._id,
authorName: testUsers.owner.name,
productionId: new ObjectId(),
});
privatePromptGroup = await PromptGroup.create({
name: 'Private Access Test Group',
author: testUsers.owner._id,
authorName: testUsers.owner.name,
productionId: new ObjectId(),
});
// Create prompts in their respective groups
await Prompt.create({
prompt: 'Public prompt',
name: 'Public',
author: testUsers.owner._id,
groupId: publicPromptGroup._id,
type: 'text',
});
await Prompt.create({
prompt: 'Private prompt',
name: 'Private',
author: testUsers.owner._id,
groupId: privatePromptGroup._id,
type: 'text',
});
// Grant public view access to publicPromptGroup
await permissionService.grantPermission({
principalType: PrincipalType.PUBLIC,
principalId: null,
resourceType: ResourceType.PROMPTGROUP,
resourceId: publicPromptGroup._id,
accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
grantedBy: testUsers.owner._id,
});
// Grant only owner access to privatePromptGroup
await permissionService.grantPermission({
principalType: PrincipalType.USER,
principalId: testUsers.owner._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: privatePromptGroup._id,
accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
grantedBy: testUsers.owner._id,
});
});
afterEach(async () => {
await Prompt.deleteMany({});
await PromptGroup.deleteMany({});
await AclEntry.deleteMany({});
});
it('public prompt should be accessible to any user', async () => {
const hasAccess = await permissionService.checkPermission({
userId: testUsers.noAccess._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: publicPromptGroup._id,
requiredPermission: PermissionBits.VIEW,
includePublic: true,
});
expect(hasAccess).toBe(true);
});
it('private prompt should not be accessible to unauthorized users', async () => {
const hasAccess = await permissionService.checkPermission({
userId: testUsers.noAccess._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: privatePromptGroup._id,
requiredPermission: PermissionBits.VIEW,
includePublic: true,
});
expect(hasAccess).toBe(false);
});
});
describe('Prompt Deletion', () => {
let testPromptGroup;
it('should remove ACL entries when prompt is deleted', async () => {
testPromptGroup = await PromptGroup.create({
name: 'Deletion Test Group',
author: testUsers.owner._id,
authorName: testUsers.owner.name,
productionId: new ObjectId(),
});
const prompt = await promptFns.savePrompt({
author: testUsers.owner._id,
prompt: {
prompt: 'To be deleted',
name: 'Delete Test',
groupId: testPromptGroup._id,
type: 'text',
},
});
// Check if savePrompt returned an error
if (!prompt || !prompt.prompt) {
throw new Error(`Failed to save prompt: ${prompt?.message || 'Unknown error'}`);
}
const testPromptId = prompt.prompt._id;
const promptGroupId = testPromptGroup._id;
// Grant permission
await permissionService.grantPermission({
principalType: PrincipalType.USER,
principalId: testUsers.owner._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: testPromptGroup._id,
accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
grantedBy: testUsers.owner._id,
});
// Verify ACL entry exists
const beforeDelete = await AclEntry.find({
resourceType: ResourceType.PROMPTGROUP,
resourceId: testPromptGroup._id,
});
expect(beforeDelete).toHaveLength(1);
// Delete the prompt
await promptFns.deletePrompt({
promptId: testPromptId,
groupId: promptGroupId,
author: testUsers.owner._id,
role: SystemRoles.USER,
});
// Verify ACL entries are removed
const aclEntries = await AclEntry.find({
resourceType: ResourceType.PROMPTGROUP,
resourceId: testPromptGroup._id,
});
expect(aclEntries).toHaveLength(0);
});
});
describe('Backwards Compatibility', () => {
it('should handle prompts without ACL entries gracefully', async () => {
// Create a prompt group first
const promptGroup = await PromptGroup.create({
name: 'Legacy Test Group',
author: testUsers.owner._id,
authorName: testUsers.owner.name,
productionId: new ObjectId(),
});
// Create a prompt without ACL entries (legacy prompt)
const legacyPrompt = await Prompt.create({
prompt: 'Legacy prompt without ACL',
name: 'Legacy',
author: testUsers.owner._id,
groupId: promptGroup._id,
type: 'text',
});
// The system should handle this gracefully
const prompt = await promptFns.getPrompt({ _id: legacyPrompt._id });
expect(prompt).toBeTruthy();
expect(prompt._id.toString()).toBe(legacyPrompt._id.toString());
});
});
});

View File

@@ -1,280 +0,0 @@
const mongoose = require('mongoose');
const { ObjectId } = require('mongodb');
const { logger } = require('@librechat/data-schemas');
const { MongoMemoryServer } = require('mongodb-memory-server');
const {
Constants,
ResourceType,
AccessRoleIds,
PrincipalType,
PrincipalModel,
PermissionBits,
} = require('librechat-data-provider');
// Mock the config/connect module to prevent connection attempts during tests
jest.mock('../../config/connect', () => jest.fn().mockResolvedValue(true));
// Disable console for tests
logger.silent = true;
describe('PromptGroup Migration Script', () => {
let mongoServer;
let Prompt, PromptGroup, AclEntry, AccessRole, User, Project;
let migrateToPromptGroupPermissions;
let testOwner, testProject;
let ownerRole, viewerRole;
beforeAll(async () => {
// Set up MongoDB memory server
mongoServer = await MongoMemoryServer.create();
const mongoUri = mongoServer.getUri();
await mongoose.connect(mongoUri);
// Initialize models
const dbModels = require('~/db/models');
Prompt = dbModels.Prompt;
PromptGroup = dbModels.PromptGroup;
AclEntry = dbModels.AclEntry;
AccessRole = dbModels.AccessRole;
User = dbModels.User;
Project = dbModels.Project;
// Create test user
testOwner = await User.create({
name: 'Test Owner',
email: 'owner@test.com',
role: 'USER',
});
// Create test project with the proper name
const projectName = Constants.GLOBAL_PROJECT_NAME || 'instance';
testProject = await Project.create({
name: projectName,
description: 'Global project',
promptGroupIds: [],
});
// Create promptGroup access roles
ownerRole = await AccessRole.create({
accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
name: 'Owner',
description: 'Full control over promptGroups',
resourceType: ResourceType.PROMPTGROUP,
permBits:
PermissionBits.VIEW | PermissionBits.EDIT | PermissionBits.DELETE | PermissionBits.SHARE,
});
viewerRole = await AccessRole.create({
accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
name: 'Viewer',
description: 'Can view promptGroups',
resourceType: ResourceType.PROMPTGROUP,
permBits: PermissionBits.VIEW,
});
await AccessRole.create({
accessRoleId: AccessRoleIds.PROMPTGROUP_EDITOR,
name: 'Editor',
description: 'Can view and edit promptGroups',
resourceType: ResourceType.PROMPTGROUP,
permBits: PermissionBits.VIEW | PermissionBits.EDIT,
});
// Import migration function
const migration = require('../../config/migrate-prompt-permissions');
migrateToPromptGroupPermissions = migration.migrateToPromptGroupPermissions;
});
afterAll(async () => {
await mongoose.disconnect();
await mongoServer.stop();
});
beforeEach(async () => {
// Clean up before each test
await Prompt.deleteMany({});
await PromptGroup.deleteMany({});
await AclEntry.deleteMany({});
// Reset the project's promptGroupIds array
testProject.promptGroupIds = [];
await testProject.save();
});
it('should categorize promptGroups correctly in dry run', async () => {
// Create global prompt group (in Global project)
const globalPromptGroup = await PromptGroup.create({
name: 'Global Group',
author: testOwner._id,
authorName: testOwner.name,
productionId: new ObjectId(),
});
// Create private prompt group (not in any project)
await PromptGroup.create({
name: 'Private Group',
author: testOwner._id,
authorName: testOwner.name,
productionId: new ObjectId(),
});
// Add global group to project's promptGroupIds array
testProject.promptGroupIds = [globalPromptGroup._id];
await testProject.save();
const result = await migrateToPromptGroupPermissions({ dryRun: true });
expect(result.dryRun).toBe(true);
expect(result.summary.total).toBe(2);
expect(result.summary.globalViewAccess).toBe(1);
expect(result.summary.privateGroups).toBe(1);
});
it('should grant appropriate permissions during migration', async () => {
// Create prompt groups
const globalPromptGroup = await PromptGroup.create({
name: 'Global Group',
author: testOwner._id,
authorName: testOwner.name,
productionId: new ObjectId(),
});
const privatePromptGroup = await PromptGroup.create({
name: 'Private Group',
author: testOwner._id,
authorName: testOwner.name,
productionId: new ObjectId(),
});
// Add global group to project's promptGroupIds array
testProject.promptGroupIds = [globalPromptGroup._id];
await testProject.save();
const result = await migrateToPromptGroupPermissions({ dryRun: false });
expect(result.migrated).toBe(2);
expect(result.errors).toBe(0);
expect(result.ownerGrants).toBe(2);
expect(result.publicViewGrants).toBe(1);
// Check global promptGroup permissions
const globalOwnerEntry = await AclEntry.findOne({
resourceType: ResourceType.PROMPTGROUP,
resourceId: globalPromptGroup._id,
principalType: PrincipalType.USER,
principalId: testOwner._id,
});
expect(globalOwnerEntry).toBeTruthy();
expect(globalOwnerEntry.permBits).toBe(ownerRole.permBits);
const globalPublicEntry = await AclEntry.findOne({
resourceType: ResourceType.PROMPTGROUP,
resourceId: globalPromptGroup._id,
principalType: PrincipalType.PUBLIC,
});
expect(globalPublicEntry).toBeTruthy();
expect(globalPublicEntry.permBits).toBe(viewerRole.permBits);
// Check private promptGroup permissions
const privateOwnerEntry = await AclEntry.findOne({
resourceType: ResourceType.PROMPTGROUP,
resourceId: privatePromptGroup._id,
principalType: PrincipalType.USER,
principalId: testOwner._id,
});
expect(privateOwnerEntry).toBeTruthy();
expect(privateOwnerEntry.permBits).toBe(ownerRole.permBits);
const privatePublicEntry = await AclEntry.findOne({
resourceType: ResourceType.PROMPTGROUP,
resourceId: privatePromptGroup._id,
principalType: PrincipalType.PUBLIC,
});
expect(privatePublicEntry).toBeNull();
});
it('should skip promptGroups that already have ACL entries', async () => {
// Create prompt groups
const promptGroup1 = await PromptGroup.create({
name: 'Group 1',
author: testOwner._id,
authorName: testOwner.name,
productionId: new ObjectId(),
});
const promptGroup2 = await PromptGroup.create({
name: 'Group 2',
author: testOwner._id,
authorName: testOwner.name,
productionId: new ObjectId(),
});
// Grant permission to one promptGroup manually (simulating it already has ACL)
await AclEntry.create({
principalType: PrincipalType.USER,
principalId: testOwner._id,
principalModel: PrincipalModel.USER,
resourceType: ResourceType.PROMPTGROUP,
resourceId: promptGroup1._id,
permBits: ownerRole.permBits,
roleId: ownerRole._id,
grantedBy: testOwner._id,
grantedAt: new Date(),
});
const result = await migrateToPromptGroupPermissions({ dryRun: false });
// Should only migrate promptGroup2, skip promptGroup1
expect(result.migrated).toBe(1);
expect(result.errors).toBe(0);
// Verify promptGroup2 now has permissions
const group2Entry = await AclEntry.findOne({
resourceType: ResourceType.PROMPTGROUP,
resourceId: promptGroup2._id,
});
expect(group2Entry).toBeTruthy();
});
it('should handle promptGroups with prompts correctly', async () => {
// Create a promptGroup with some prompts
const promptGroup = await PromptGroup.create({
name: 'Group with Prompts',
author: testOwner._id,
authorName: testOwner.name,
productionId: new ObjectId(),
});
// Create some prompts in this group
await Prompt.create({
prompt: 'First prompt',
author: testOwner._id,
groupId: promptGroup._id,
type: 'text',
});
await Prompt.create({
prompt: 'Second prompt',
author: testOwner._id,
groupId: promptGroup._id,
type: 'text',
});
const result = await migrateToPromptGroupPermissions({ dryRun: false });
expect(result.migrated).toBe(1);
expect(result.errors).toBe(0);
// Verify the promptGroup has permissions
const groupEntry = await AclEntry.findOne({
resourceType: ResourceType.PROMPTGROUP,
resourceId: promptGroup._id,
});
expect(groupEntry).toBeTruthy();
// Verify no prompt-level permissions were created
const promptEntries = await AclEntry.find({
resourceType: 'prompt',
});
expect(promptEntries).toHaveLength(0);
});
});

View File

@@ -2,6 +2,7 @@ const {
CacheKeys,
SystemRoles,
roleDefaults,
PermissionTypes,
permissionsSchema,
removeNullishValues,
} = require('librechat-data-provider');
@@ -16,7 +17,7 @@ const { Role } = require('~/db/models');
*
* @param {string} roleName - The name of the role to find or create.
* @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
* @returns {Promise<IRole>} Role document.
* @returns {Promise<Object>} A plain object representing the role document.
*/
const getRoleByName = async function (roleName, fieldsToSelect = null) {
const cache = getLogStores(CacheKeys.ROLES);
@@ -72,9 +73,8 @@ const updateRoleByName = async function (roleName, updates) {
* Updates access permissions for a specific role and multiple permission types.
* @param {string} roleName - The role to update.
* @param {Object.<PermissionTypes, Object.<Permissions, boolean>>} permissionsUpdate - Permissions to update and their values.
* @param {IRole} [roleData] - Optional role data to use instead of fetching from the database.
*/
async function updateAccessPermissions(roleName, permissionsUpdate, roleData) {
async function updateAccessPermissions(roleName, permissionsUpdate) {
// Filter and clean the permission updates based on our schema definition.
const updates = {};
for (const [permissionType, permissions] of Object.entries(permissionsUpdate)) {
@@ -87,7 +87,7 @@ async function updateAccessPermissions(roleName, permissionsUpdate, roleData) {
}
try {
const role = roleData ?? (await getRoleByName(roleName));
const role = await getRoleByName(roleName);
if (!role) {
return;
}
@@ -114,6 +114,7 @@ async function updateAccessPermissions(roleName, permissionsUpdate, roleData) {
}
}
// Process the current updates
for (const [permissionType, permissions] of Object.entries(updates)) {
const currentTypePermissions = currentPermissions[permissionType] || {};
updatedPermissions[permissionType] = { ...currentTypePermissions };

View File

@@ -22,7 +22,6 @@ const {
} = require('./Message');
const { getConvoTitle, getConvo, saveConvo, deleteConvos } = require('./Conversation');
const { getPreset, getPresets, savePreset, deletePresets } = require('./Preset');
const { File } = require('~/db/models');
module.exports = {
...methods,
@@ -52,6 +51,4 @@ module.exports = {
getPresets,
savePreset,
deletePresets,
Files: File,
};

View File

@@ -1,4 +1,4 @@
const { matchModelName } = require('../utils/tokens');
const { matchModelName } = require('../utils');
const defaultRate = 6;
/**
@@ -87,9 +87,6 @@ const tokenValues = Object.assign(
'gpt-4.1': { prompt: 2, completion: 8 },
'gpt-4.5': { prompt: 75, completion: 150 },
'gpt-4o-mini': { prompt: 0.15, completion: 0.6 },
'gpt-5': { prompt: 1.25, completion: 10 },
'gpt-5-mini': { prompt: 0.25, completion: 2 },
'gpt-5-nano': { prompt: 0.05, completion: 0.4 },
'gpt-4o': { prompt: 2.5, completion: 10 },
'gpt-4o-2024-05-13': { prompt: 5, completion: 15 },
'gpt-4-1106': { prompt: 10, completion: 30 },
@@ -150,9 +147,6 @@ const tokenValues = Object.assign(
codestral: { prompt: 0.3, completion: 0.9 },
'ministral-8b': { prompt: 0.1, completion: 0.1 },
'ministral-3b': { prompt: 0.04, completion: 0.04 },
// GPT-OSS models
'gpt-oss-20b': { prompt: 0.05, completion: 0.2 },
'gpt-oss-120b': { prompt: 0.15, completion: 0.6 },
},
bedrockValues,
);
@@ -220,12 +214,6 @@ const getValueKey = (model, endpoint) => {
return 'gpt-4.1';
} else if (modelName.includes('gpt-4o-2024-05-13')) {
return 'gpt-4o-2024-05-13';
} else if (modelName.includes('gpt-5-nano')) {
return 'gpt-5-nano';
} else if (modelName.includes('gpt-5-mini')) {
return 'gpt-5-mini';
} else if (modelName.includes('gpt-5')) {
return 'gpt-5';
} else if (modelName.includes('gpt-4o-mini')) {
return 'gpt-4o-mini';
} else if (modelName.includes('gpt-4o')) {

View File

@@ -25,14 +25,8 @@ describe('getValueKey', () => {
expect(getValueKey('gpt-4-some-other-info')).toBe('8k');
});
it('should return "gpt-5" for model name containing "gpt-5"', () => {
expect(getValueKey('gpt-5-some-other-info')).toBe('gpt-5');
expect(getValueKey('gpt-5-2025-01-30')).toBe('gpt-5');
expect(getValueKey('gpt-5-2025-01-30-0130')).toBe('gpt-5');
expect(getValueKey('openai/gpt-5')).toBe('gpt-5');
expect(getValueKey('openai/gpt-5-2025-01-30')).toBe('gpt-5');
expect(getValueKey('gpt-5-turbo')).toBe('gpt-5');
expect(getValueKey('gpt-5-0130')).toBe('gpt-5');
it('should return undefined for model names that do not match any known patterns', () => {
expect(getValueKey('gpt-5-some-other-info')).toBeUndefined();
});
it('should return "gpt-3.5-turbo-1106" for model name containing "gpt-3.5-turbo-1106"', () => {
@@ -90,29 +84,6 @@ describe('getValueKey', () => {
expect(getValueKey('gpt-4.1-nano-0125')).toBe('gpt-4.1-nano');
});
it('should return "gpt-5" for model type of "gpt-5"', () => {
expect(getValueKey('gpt-5-2025-01-30')).toBe('gpt-5');
expect(getValueKey('gpt-5-2025-01-30-0130')).toBe('gpt-5');
expect(getValueKey('openai/gpt-5')).toBe('gpt-5');
expect(getValueKey('openai/gpt-5-2025-01-30')).toBe('gpt-5');
expect(getValueKey('gpt-5-turbo')).toBe('gpt-5');
expect(getValueKey('gpt-5-0130')).toBe('gpt-5');
});
it('should return "gpt-5-mini" for model type of "gpt-5-mini"', () => {
expect(getValueKey('gpt-5-mini-2025-01-30')).toBe('gpt-5-mini');
expect(getValueKey('openai/gpt-5-mini')).toBe('gpt-5-mini');
expect(getValueKey('gpt-5-mini-0130')).toBe('gpt-5-mini');
expect(getValueKey('gpt-5-mini-2025-01-30-0130')).toBe('gpt-5-mini');
});
it('should return "gpt-5-nano" for model type of "gpt-5-nano"', () => {
expect(getValueKey('gpt-5-nano-2025-01-30')).toBe('gpt-5-nano');
expect(getValueKey('openai/gpt-5-nano')).toBe('gpt-5-nano');
expect(getValueKey('gpt-5-nano-0130')).toBe('gpt-5-nano');
expect(getValueKey('gpt-5-nano-2025-01-30-0130')).toBe('gpt-5-nano');
});
it('should return "gpt-4o" for model type of "gpt-4o"', () => {
expect(getValueKey('gpt-4o-2024-08-06')).toBe('gpt-4o');
expect(getValueKey('gpt-4o-2024-08-06-0718')).toBe('gpt-4o');
@@ -236,48 +207,6 @@ describe('getMultiplier', () => {
);
});
it('should return the correct multiplier for gpt-5', () => {
const valueKey = getValueKey('gpt-5-2025-01-30');
expect(getMultiplier({ valueKey, tokenType: 'prompt' })).toBe(tokenValues['gpt-5'].prompt);
expect(getMultiplier({ valueKey, tokenType: 'completion' })).toBe(
tokenValues['gpt-5'].completion,
);
expect(getMultiplier({ model: 'gpt-5-preview', tokenType: 'prompt' })).toBe(
tokenValues['gpt-5'].prompt,
);
expect(getMultiplier({ model: 'openai/gpt-5', tokenType: 'completion' })).toBe(
tokenValues['gpt-5'].completion,
);
});
it('should return the correct multiplier for gpt-5-mini', () => {
const valueKey = getValueKey('gpt-5-mini-2025-01-30');
expect(getMultiplier({ valueKey, tokenType: 'prompt' })).toBe(tokenValues['gpt-5-mini'].prompt);
expect(getMultiplier({ valueKey, tokenType: 'completion' })).toBe(
tokenValues['gpt-5-mini'].completion,
);
expect(getMultiplier({ model: 'gpt-5-mini-preview', tokenType: 'prompt' })).toBe(
tokenValues['gpt-5-mini'].prompt,
);
expect(getMultiplier({ model: 'openai/gpt-5-mini', tokenType: 'completion' })).toBe(
tokenValues['gpt-5-mini'].completion,
);
});
it('should return the correct multiplier for gpt-5-nano', () => {
const valueKey = getValueKey('gpt-5-nano-2025-01-30');
expect(getMultiplier({ valueKey, tokenType: 'prompt' })).toBe(tokenValues['gpt-5-nano'].prompt);
expect(getMultiplier({ valueKey, tokenType: 'completion' })).toBe(
tokenValues['gpt-5-nano'].completion,
);
expect(getMultiplier({ model: 'gpt-5-nano-preview', tokenType: 'prompt' })).toBe(
tokenValues['gpt-5-nano'].prompt,
);
expect(getMultiplier({ model: 'openai/gpt-5-nano', tokenType: 'completion' })).toBe(
tokenValues['gpt-5-nano'].completion,
);
});
it('should return the correct multiplier for gpt-4o', () => {
const valueKey = getValueKey('gpt-4o-2024-08-06');
expect(getMultiplier({ valueKey, tokenType: 'prompt' })).toBe(tokenValues['gpt-4o'].prompt);
@@ -378,22 +307,10 @@ describe('getMultiplier', () => {
});
it('should return defaultRate if derived valueKey does not match any known patterns', () => {
expect(getMultiplier({ tokenType: 'prompt', model: 'gpt-10-some-other-info' })).toBe(
expect(getMultiplier({ tokenType: 'prompt', model: 'gpt-5-some-other-info' })).toBe(
defaultRate,
);
});
it('should return correct multipliers for GPT-OSS models', () => {
const models = ['gpt-oss-20b', 'gpt-oss-120b'];
models.forEach((key) => {
const expectedPrompt = tokenValues[key].prompt;
const expectedCompletion = tokenValues[key].completion;
expect(getMultiplier({ valueKey: key, tokenType: 'prompt' })).toBe(expectedPrompt);
expect(getMultiplier({ valueKey: key, tokenType: 'completion' })).toBe(expectedCompletion);
expect(getMultiplier({ model: key, tokenType: 'prompt' })).toBe(expectedPrompt);
expect(getMultiplier({ model: key, tokenType: 'completion' })).toBe(expectedCompletion);
});
});
});
describe('AWS Bedrock Model Tests', () => {

View File

@@ -1,6 +1,6 @@
{
"name": "@librechat/backend",
"version": "v0.8.0-rc2",
"version": "v0.8.0-rc1",
"description": "",
"scripts": {
"start": "echo 'please run this from the root directory'",
@@ -49,12 +49,11 @@
"@langchain/google-vertexai": "^0.2.13",
"@langchain/openai": "^0.5.18",
"@langchain/textsplitters": "^0.1.0",
"@librechat/agents": "^2.4.75",
"@librechat/agents": "^2.4.69",
"@librechat/api": "*",
"@librechat/data-schemas": "*",
"@modelcontextprotocol/sdk": "^1.17.1",
"@node-saml/passport-saml": "^5.1.0",
"@microsoft/microsoft-graph-client": "^3.0.7",
"@waylaidwanderer/fetch-event-source": "^3.0.1",
"axios": "^1.8.2",
"bcryptjs": "^2.4.3",

View File

@@ -12,7 +12,6 @@ const {
} = require('~/server/services/AuthService');
const { findUser, getUserById, deleteAllUserSessions, findSession } = require('~/models');
const { getOpenIdConfig } = require('~/strategies');
const { getGraphApiToken } = require('~/server/services/GraphTokenService');
const registrationController = async (req, res) => {
try {
@@ -119,54 +118,9 @@ const refreshController = async (req, res) => {
}
};
const graphTokenController = async (req, res) => {
try {
// Validate user is authenticated via Entra ID
if (!req.user.openidId || req.user.provider !== 'openid') {
return res.status(403).json({
message: 'Microsoft Graph access requires Entra ID authentication',
});
}
// Check if OpenID token reuse is active (required for on-behalf-of flow)
if (!isEnabled(process.env.OPENID_REUSE_TOKENS)) {
return res.status(403).json({
message: 'SharePoint integration requires OpenID token reuse to be enabled',
});
}
// Extract access token from Authorization header
const authHeader = req.headers.authorization;
if (!authHeader || !authHeader.startsWith('Bearer ')) {
return res.status(401).json({
message: 'Valid authorization token required',
});
}
// Get scopes from query parameters
const scopes = req.query.scopes;
if (!scopes) {
return res.status(400).json({
message: 'Graph API scopes are required as query parameter',
});
}
const accessToken = authHeader.substring(7); // Remove 'Bearer ' prefix
const tokenResponse = await getGraphApiToken(req.user, accessToken, scopes);
res.json(tokenResponse);
} catch (error) {
logger.error('[graphTokenController] Failed to obtain Graph API token:', error);
res.status(500).json({
message: 'Failed to obtain Microsoft Graph token',
});
}
};
module.exports = {
refreshController,
registrationController,
resetPasswordController,
resetPasswordRequestController,
graphTokenController,
};

View File

@@ -0,0 +1,46 @@
const { logger } = require('~/config');
// Handle duplicate key errors
const handleDuplicateKeyError = (err, res) => {
logger.error('Duplicate key error:', err.keyValue);
const field = `${JSON.stringify(Object.keys(err.keyValue))}`;
const code = 409;
res
.status(code)
.send({ messages: `An document with that ${field} already exists.`, fields: field });
};
// Handle validation errors
const handleValidationError = (err, res) => {
logger.error('Validation error:', err.errors);
let errors = Object.values(err.errors).map((el) => el.message);
let fields = `${JSON.stringify(Object.values(err.errors).map((el) => el.path))}`;
let code = 400;
if (errors.length > 1) {
errors = errors.join(' ');
res.status(code).send({ messages: `${JSON.stringify(errors)}`, fields: fields });
} else {
res.status(code).send({ messages: `${JSON.stringify(errors)}`, fields: fields });
}
};
module.exports = (err, _req, res, _next) => {
try {
if (err.name === 'ValidationError') {
return handleValidationError(err, res);
}
if (err.code && err.code == 11000) {
return handleDuplicateKeyError(err, res);
}
// Special handling for errors like SyntaxError
if (err.statusCode && err.body) {
return res.status(err.statusCode).send(err.body);
}
logger.error('ErrorController => error', err);
return res.status(500).send('An unknown error occurred.');
} catch (err) {
logger.error('ErrorController => processing error', err);
return res.status(500).send('Processing error in ErrorController.');
}
};
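Because the exported handler uses the four-argument `(err, req, res, next)` signature, Express treats it as error-handling middleware. A minimal mounting sketch (the file path and app wiring are assumptions for illustration, not part of this diff):

const express = require('express');
const errorController = require('./controllers/ErrorController');

const app = express();
// ...register routes first...
app.use(errorController); // error handlers must be registered after the routes they guard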

View File

@@ -1,43 +1,36 @@
import { logger } from '@librechat/data-schemas';
import { ErrorController } from './error';
import type { Request, Response } from 'express';
import type { ValidationError, MongoServerError, CustomError } from '~/types';
const errorController = require('./ErrorController');
const { logger } = require('~/config');
// Mock the logger
jest.mock('@librechat/data-schemas', () => ({
...jest.requireActual('@librechat/data-schemas'),
jest.mock('~/config', () => ({
logger: {
error: jest.fn(),
warn: jest.fn(),
},
}));
describe('ErrorController', () => {
let mockReq: Request;
let mockRes: Response;
let mockReq, mockRes, mockNext;
beforeEach(() => {
mockReq = {
originalUrl: '',
} as Request;
mockReq = {};
mockRes = {
status: jest.fn().mockReturnThis(),
send: jest.fn(),
} as unknown as Response;
(logger.error as jest.Mock).mockClear();
};
mockNext = jest.fn();
logger.error.mockClear();
});
describe('ValidationError handling', () => {
it('should handle ValidationError with single error', () => {
const validationError = {
name: 'ValidationError',
message: 'Validation error',
errors: {
email: { message: 'Email is required', path: 'email' },
},
} as ValidationError;
};
ErrorController(validationError, mockReq, mockRes);
errorController(validationError, mockReq, mockRes, mockNext);
expect(mockRes.status).toHaveBeenCalledWith(400);
expect(mockRes.send).toHaveBeenCalledWith({
@@ -50,14 +43,13 @@ describe('ErrorController', () => {
it('should handle ValidationError with multiple errors', () => {
const validationError = {
name: 'ValidationError',
message: 'Validation error',
errors: {
email: { message: 'Email is required', path: 'email' },
password: { message: 'Password is required', path: 'password' },
},
} as ValidationError;
};
ErrorController(validationError, mockReq, mockRes);
errorController(validationError, mockReq, mockRes, mockNext);
expect(mockRes.status).toHaveBeenCalledWith(400);
expect(mockRes.send).toHaveBeenCalledWith({
@@ -71,9 +63,9 @@ describe('ErrorController', () => {
const validationError = {
name: 'ValidationError',
errors: {},
} as ValidationError;
};
ErrorController(validationError, mockReq, mockRes);
errorController(validationError, mockReq, mockRes, mockNext);
expect(mockRes.status).toHaveBeenCalledWith(400);
expect(mockRes.send).toHaveBeenCalledWith({
@@ -86,59 +78,43 @@ describe('ErrorController', () => {
describe('Duplicate key error handling', () => {
it('should handle duplicate key error (code 11000)', () => {
const duplicateKeyError = {
name: 'MongoServerError',
message: 'Duplicate key error',
code: 11000,
keyValue: { email: 'test@example.com' },
errmsg:
'E11000 duplicate key error collection: test.users index: email_1 dup key: { email: "test@example.com" }',
} as MongoServerError;
};
ErrorController(duplicateKeyError, mockReq, mockRes);
errorController(duplicateKeyError, mockReq, mockRes, mockNext);
expect(mockRes.status).toHaveBeenCalledWith(409);
expect(mockRes.send).toHaveBeenCalledWith({
messages: 'An document with that ["email"] already exists.',
fields: '["email"]',
});
expect(logger.warn).toHaveBeenCalledWith(
'Duplicate key error: E11000 duplicate key error collection: test.users index: email_1 dup key: { email: "test@example.com" }',
);
expect(logger.error).toHaveBeenCalledWith('Duplicate key error:', duplicateKeyError.keyValue);
});
it('should handle duplicate key error with multiple fields', () => {
const duplicateKeyError = {
name: 'MongoServerError',
message: 'Duplicate key error',
code: 11000,
keyValue: { email: 'test@example.com', username: 'testuser' },
errmsg:
'E11000 duplicate key error collection: test.users index: email_1 dup key: { email: "test@example.com" }',
} as MongoServerError;
};
ErrorController(duplicateKeyError, mockReq, mockRes);
errorController(duplicateKeyError, mockReq, mockRes, mockNext);
expect(mockRes.status).toHaveBeenCalledWith(409);
expect(mockRes.send).toHaveBeenCalledWith({
messages: 'An document with that ["email","username"] already exists.',
fields: '["email","username"]',
});
expect(logger.warn).toHaveBeenCalledWith(
'Duplicate key error: E11000 duplicate key error collection: test.users index: email_1 dup key: { email: "test@example.com" }',
);
expect(logger.error).toHaveBeenCalledWith('Duplicate key error:', duplicateKeyError.keyValue);
});
it('should handle error with code 11000 as string', () => {
const duplicateKeyError = {
name: 'MongoServerError',
message: 'Duplicate key error',
code: 11000,
code: '11000',
keyValue: { email: 'test@example.com' },
errmsg:
'E11000 duplicate key error collection: test.users index: email_1 dup key: { email: "test@example.com" }',
} as MongoServerError;
};
ErrorController(duplicateKeyError, mockReq, mockRes);
errorController(duplicateKeyError, mockReq, mockRes, mockNext);
expect(mockRes.status).toHaveBeenCalledWith(409);
expect(mockRes.send).toHaveBeenCalledWith({
@@ -153,9 +129,9 @@ describe('ErrorController', () => {
const syntaxError = {
statusCode: 400,
body: 'Invalid JSON syntax',
} as CustomError;
};
ErrorController(syntaxError, mockReq, mockRes);
errorController(syntaxError, mockReq, mockRes, mockNext);
expect(mockRes.status).toHaveBeenCalledWith(400);
expect(mockRes.send).toHaveBeenCalledWith('Invalid JSON syntax');
@@ -165,9 +141,9 @@ describe('ErrorController', () => {
const customError = {
statusCode: 422,
body: { error: 'Unprocessable entity' },
} as CustomError;
};
ErrorController(customError, mockReq, mockRes);
errorController(customError, mockReq, mockRes, mockNext);
expect(mockRes.status).toHaveBeenCalledWith(422);
expect(mockRes.send).toHaveBeenCalledWith({ error: 'Unprocessable entity' });
@@ -176,9 +152,9 @@ describe('ErrorController', () => {
it('should handle error with statusCode but no body', () => {
const partialError = {
statusCode: 400,
} as CustomError;
};
ErrorController(partialError, mockReq, mockRes);
errorController(partialError, mockReq, mockRes, mockNext);
expect(mockRes.status).toHaveBeenCalledWith(500);
expect(mockRes.send).toHaveBeenCalledWith('An unknown error occurred.');
@@ -187,9 +163,9 @@ describe('ErrorController', () => {
it('should handle error with body but no statusCode', () => {
const partialError = {
body: 'Some error message',
} as CustomError;
};
ErrorController(partialError, mockReq, mockRes);
errorController(partialError, mockReq, mockRes, mockNext);
expect(mockRes.status).toHaveBeenCalledWith(500);
expect(mockRes.send).toHaveBeenCalledWith('An unknown error occurred.');
@@ -200,7 +176,7 @@ describe('ErrorController', () => {
it('should handle unknown errors', () => {
const unknownError = new Error('Some unknown error');
ErrorController(unknownError, mockReq, mockRes);
errorController(unknownError, mockReq, mockRes, mockNext);
expect(mockRes.status).toHaveBeenCalledWith(500);
expect(mockRes.send).toHaveBeenCalledWith('An unknown error occurred.');
@@ -211,31 +187,32 @@ describe('ErrorController', () => {
const mongoError = {
code: 11100,
message: 'Some MongoDB error',
} as MongoServerError;
};
ErrorController(mongoError, mockReq, mockRes);
errorController(mongoError, mockReq, mockRes, mockNext);
expect(mockRes.status).toHaveBeenCalledWith(500);
expect(mockRes.send).toHaveBeenCalledWith('An unknown error occurred.');
expect(logger.error).toHaveBeenCalledWith('ErrorController => error', mongoError);
});
it('should handle generic errors', () => {
const genericError = new Error('Test error');
ErrorController(genericError, mockReq, mockRes);
it('should handle null/undefined errors', () => {
errorController(null, mockReq, mockRes, mockNext);
expect(mockRes.status).toHaveBeenCalledWith(500);
expect(mockRes.send).toHaveBeenCalledWith('An unknown error occurred.');
expect(logger.error).toHaveBeenCalledWith('ErrorController => error', genericError);
expect(mockRes.send).toHaveBeenCalledWith('Processing error in ErrorController.');
expect(logger.error).toHaveBeenCalledWith(
'ErrorController => processing error',
expect.any(Error),
);
});
});
describe('Catch block handling', () => {
beforeEach(() => {
// Restore logger mock to normal behavior for these tests
(logger.error as jest.Mock).mockRestore();
(logger.error as jest.Mock) = jest.fn();
logger.error.mockRestore();
logger.error = jest.fn();
});
it('should handle errors when logger.error throws', () => {
@@ -243,10 +220,10 @@ describe('ErrorController', () => {
const freshMockRes = {
status: jest.fn().mockReturnThis(),
send: jest.fn(),
} as unknown as Response;
};
// Mock logger to throw on the first call, succeed on the second
(logger.error as jest.Mock)
logger.error
.mockImplementationOnce(() => {
throw new Error('Logger error');
})
@@ -254,7 +231,7 @@ describe('ErrorController', () => {
const testError = new Error('Test error');
ErrorController(testError, mockReq, freshMockRes);
errorController(testError, mockReq, freshMockRes, mockNext);
expect(freshMockRes.status).toHaveBeenCalledWith(500);
expect(freshMockRes.send).toHaveBeenCalledWith('Processing error in ErrorController.');

View File

@@ -5,7 +5,6 @@ const { logger } = require('~/config');
/**
* @param {ServerRequest} req
* @returns {Promise<TModelsConfig>} The models config.
*/
const getModelsConfig = async (req) => {
const cache = getLogStores(CacheKeys.CONFIG_STORE);

View File

@@ -1,471 +0,0 @@
/**
* @import { TUpdateResourcePermissionsRequest, TUpdateResourcePermissionsResponse } from 'librechat-data-provider'
*/
const mongoose = require('mongoose');
const { logger } = require('@librechat/data-schemas');
const { ResourceType, PrincipalType } = require('librechat-data-provider');
const {
bulkUpdateResourcePermissions,
ensureGroupPrincipalExists,
getEffectivePermissions,
ensurePrincipalExists,
getAvailableRoles,
} = require('~/server/services/PermissionService');
const { AclEntry } = require('~/db/models');
const {
searchPrincipals: searchLocalPrincipals,
sortPrincipalsByRelevance,
calculateRelevanceScore,
} = require('~/models');
const {
entraIdPrincipalFeatureEnabled,
searchEntraIdPrincipals,
} = require('~/server/services/GraphApiService');
/**
* Generic controller for resource permission endpoints
* Delegates validation and logic to PermissionService
*/
/**
* Validates that the resourceType is one of the supported enum values
* @param {string} resourceType - The resource type to validate
* @throws {Error} If resourceType is not valid
*/
const validateResourceType = (resourceType) => {
const validTypes = Object.values(ResourceType);
if (!validTypes.includes(resourceType)) {
throw new Error(`Invalid resourceType: ${resourceType}. Valid types: ${validTypes.join(', ')}`);
}
};
/**
* Bulk update permissions for a resource (grant, update, remove)
* @route PUT /api/{resourceType}/{resourceId}/permissions
* @param {Object} req - Express request object
* @param {Object} req.params - Route parameters
* @param {string} req.params.resourceType - Resource type (e.g., 'agent')
* @param {string} req.params.resourceId - Resource ID
* @param {TUpdateResourcePermissionsRequest} req.body - Request body
* @param {Object} res - Express response object
* @returns {Promise<TUpdateResourcePermissionsResponse>} Updated permissions response
*/
const updateResourcePermissions = async (req, res) => {
try {
const { resourceType, resourceId } = req.params;
validateResourceType(resourceType);
/** @type {TUpdateResourcePermissionsRequest} */
const { updated, removed, public: isPublic, publicAccessRoleId } = req.body;
const { id: userId } = req.user;
// Prepare principals for the service call
const updatedPrincipals = [];
const revokedPrincipals = [];
// Add updated principals
if (updated && Array.isArray(updated)) {
updatedPrincipals.push(...updated);
}
// Add public permission if enabled
if (isPublic && publicAccessRoleId) {
updatedPrincipals.push({
type: PrincipalType.PUBLIC,
id: null,
accessRoleId: publicAccessRoleId,
});
}
// Prepare authentication context for enhanced group member fetching
const useEntraId = entraIdPrincipalFeatureEnabled(req.user);
const authHeader = req.headers.authorization;
const accessToken =
authHeader && authHeader.startsWith('Bearer ') ? authHeader.substring(7) : null;
const authContext =
useEntraId && accessToken
? {
accessToken,
sub: req.user.openidId,
}
: null;
// Ensure updated principals exist in the database before processing permissions
const validatedPrincipals = [];
for (const principal of updatedPrincipals) {
try {
let principalId;
if (principal.type === PrincipalType.PUBLIC) {
principalId = null; // Public principals don't need database records
} else if (principal.type === PrincipalType.ROLE) {
principalId = principal.id; // Role principals use role name as ID
} else if (principal.type === PrincipalType.USER) {
principalId = await ensurePrincipalExists(principal);
} else if (principal.type === PrincipalType.GROUP) {
// Pass authContext to enable member fetching for Entra ID groups when available
principalId = await ensureGroupPrincipalExists(principal, authContext);
} else {
logger.error(`Unsupported principal type: ${principal.type}`);
continue; // Skip invalid principal types
}
// Update the principal with the validated ID for ACL operations
validatedPrincipals.push({
...principal,
id: principalId,
});
} catch (error) {
logger.error('Error ensuring principal exists:', {
principal: {
type: principal.type,
id: principal.id,
name: principal.name,
source: principal.source,
},
error: error.message,
});
// Continue with other principals instead of failing the entire operation
continue;
}
}
// Add removed principals
if (removed && Array.isArray(removed)) {
revokedPrincipals.push(...removed);
}
// If public is disabled, add public to revoked list
if (!isPublic) {
revokedPrincipals.push({
type: PrincipalType.PUBLIC,
id: null,
});
}
const results = await bulkUpdateResourcePermissions({
resourceType,
resourceId,
updatedPrincipals: validatedPrincipals,
revokedPrincipals,
grantedBy: userId,
});
/** @type {TUpdateResourcePermissionsResponse} */
const response = {
message: 'Permissions updated successfully',
results: {
principals: results.granted,
public: isPublic || false,
publicAccessRoleId: isPublic ? publicAccessRoleId : undefined,
},
};
res.status(200).json(response);
} catch (error) {
logger.error('Error updating resource permissions:', error);
res.status(400).json({
error: 'Failed to update permissions',
details: error.message,
});
}
};
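For readers skimming this removed file, a hedged sketch of the request body shape that updateResourcePermissions destructures above; the ids and role names are placeholders, not values from the repository:

// Illustrative only: body for PUT /api/{resourceType}/{resourceId}/permissions.
// Principal `type` values come from PrincipalType; the accessRoleId strings are placeholders.
const body = {
  updated: [
    { type: PrincipalType.USER, id: '663f1b2c0a1b2c3d4e5f6789', accessRoleId: 'example_edit_role' },
    { type: PrincipalType.GROUP, id: null, idOnTheSource: 'entra-group-guid', source: 'entra', accessRoleId: 'example_view_role' },
  ],
  removed: [{ type: PrincipalType.USER, id: '663f1b2c0a1b2c3d4e5f678a' }],
  public: true,
  publicAccessRoleId: 'example_view_role',
};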
/**
* Get principals with their permission roles for a resource (UI-friendly format)
* Uses efficient aggregation pipeline to join User/Group data in single query
* @route GET /api/permissions/{resourceType}/{resourceId}
*/
const getResourcePermissions = async (req, res) => {
try {
const { resourceType, resourceId } = req.params;
validateResourceType(resourceType);
// Use aggregation pipeline for efficient single-query data retrieval
const results = await AclEntry.aggregate([
// Match ACL entries for this resource
{
$match: {
resourceType,
resourceId: mongoose.Types.ObjectId.isValid(resourceId)
? mongoose.Types.ObjectId.createFromHexString(resourceId)
: resourceId,
},
},
// Lookup AccessRole information
{
$lookup: {
from: 'accessroles',
localField: 'roleId',
foreignField: '_id',
as: 'role',
},
},
// Lookup User information (for user principals)
{
$lookup: {
from: 'users',
localField: 'principalId',
foreignField: '_id',
as: 'userInfo',
},
},
// Lookup Group information (for group principals)
{
$lookup: {
from: 'groups',
localField: 'principalId',
foreignField: '_id',
as: 'groupInfo',
},
},
// Project final structure
{
$project: {
principalType: 1,
principalId: 1,
accessRoleId: { $arrayElemAt: ['$role.accessRoleId', 0] },
userInfo: { $arrayElemAt: ['$userInfo', 0] },
groupInfo: { $arrayElemAt: ['$groupInfo', 0] },
},
},
]);
const principals = [];
let publicPermission = null;
// Process aggregation results
for (const result of results) {
if (result.principalType === PrincipalType.PUBLIC) {
publicPermission = {
public: true,
publicAccessRoleId: result.accessRoleId,
};
} else if (result.principalType === PrincipalType.USER && result.userInfo) {
principals.push({
type: PrincipalType.USER,
id: result.userInfo._id.toString(),
name: result.userInfo.name || result.userInfo.username,
email: result.userInfo.email,
avatar: result.userInfo.avatar,
source: !result.userInfo._id ? 'entra' : 'local',
idOnTheSource: result.userInfo.idOnTheSource || result.userInfo._id.toString(),
accessRoleId: result.accessRoleId,
});
} else if (result.principalType === PrincipalType.GROUP && result.groupInfo) {
principals.push({
type: PrincipalType.GROUP,
id: result.groupInfo._id.toString(),
name: result.groupInfo.name,
email: result.groupInfo.email,
description: result.groupInfo.description,
avatar: result.groupInfo.avatar,
source: result.groupInfo.source || 'local',
idOnTheSource: result.groupInfo.idOnTheSource || result.groupInfo._id.toString(),
accessRoleId: result.accessRoleId,
});
} else if (result.principalType === PrincipalType.ROLE) {
principals.push({
type: PrincipalType.ROLE,
/** Role name as ID */
id: result.principalId,
/** Display the role name */
name: result.principalId,
description: `System role: ${result.principalId}`,
accessRoleId: result.accessRoleId,
});
}
}
// Return response in format expected by frontend
const response = {
resourceType,
resourceId,
principals,
public: publicPermission?.public || false,
...(publicPermission?.publicAccessRoleId && {
publicAccessRoleId: publicPermission.publicAccessRoleId,
}),
};
res.status(200).json(response);
} catch (error) {
logger.error('Error getting resource permissions principals:', error);
res.status(500).json({
error: 'Failed to get permissions principals',
details: error.message,
});
}
};
/**
* Get available roles for a resource type
* @route GET /api/{resourceType}/roles
*/
const getResourceRoles = async (req, res) => {
try {
const { resourceType } = req.params;
validateResourceType(resourceType);
const roles = await getAvailableRoles({ resourceType });
res.status(200).json(
roles.map((role) => ({
accessRoleId: role.accessRoleId,
name: role.name,
description: role.description,
permBits: role.permBits,
})),
);
} catch (error) {
logger.error('Error getting resource roles:', error);
res.status(500).json({
error: 'Failed to get roles',
details: error.message,
});
}
};
/**
* Get user's effective permission bitmask for a resource
* @route GET /api/{resourceType}/{resourceId}/effective
*/
const getUserEffectivePermissions = async (req, res) => {
try {
const { resourceType, resourceId } = req.params;
validateResourceType(resourceType);
const { id: userId } = req.user;
const permissionBits = await getEffectivePermissions({
userId,
role: req.user.role,
resourceType,
resourceId,
});
res.status(200).json({
permissionBits,
});
} catch (error) {
logger.error('Error getting user effective permissions:', error);
res.status(500).json({
error: 'Failed to get effective permissions',
details: error.message,
});
}
};
/**
* Search for users and groups to grant permissions
* Supports hybrid local database + Entra ID search when configured
* @route GET /api/permissions/search-principals
*/
const searchPrincipals = async (req, res) => {
try {
const { q: query, limit = 20, type } = req.query;
if (!query || query.trim().length === 0) {
return res.status(400).json({
error: 'Query parameter "q" is required and must not be empty',
});
}
if (query.trim().length < 2) {
return res.status(400).json({
error: 'Query must be at least 2 characters long',
});
}
const searchLimit = Math.min(Math.max(1, parseInt(limit) || 10), 50);
const typeFilter = [PrincipalType.USER, PrincipalType.GROUP, PrincipalType.ROLE].includes(type)
? type
: null;
const localResults = await searchLocalPrincipals(query.trim(), searchLimit, typeFilter);
let allPrincipals = [...localResults];
const useEntraId = entraIdPrincipalFeatureEnabled(req.user);
if (useEntraId && localResults.length < searchLimit) {
try {
const graphTypeMap = {
user: 'users',
group: 'groups',
null: 'all',
};
const authHeader = req.headers.authorization;
const accessToken =
authHeader && authHeader.startsWith('Bearer ') ? authHeader.substring(7) : null;
if (accessToken) {
const graphResults = await searchEntraIdPrincipals(
accessToken,
req.user.openidId,
query.trim(),
graphTypeMap[typeFilter],
searchLimit - localResults.length,
);
const localEmails = new Set(
localResults.map((p) => p.email?.toLowerCase()).filter(Boolean),
);
const localGroupSourceIds = new Set(
localResults.map((p) => p.idOnTheSource).filter(Boolean),
);
for (const principal of graphResults) {
const isDuplicateByEmail =
principal.email && localEmails.has(principal.email.toLowerCase());
const isDuplicateBySourceId =
principal.idOnTheSource && localGroupSourceIds.has(principal.idOnTheSource);
if (!isDuplicateByEmail && !isDuplicateBySourceId) {
allPrincipals.push(principal);
}
}
}
} catch (graphError) {
logger.warn('Graph API search failed, falling back to local results:', graphError.message);
}
}
const scoredResults = allPrincipals.map((item) => ({
...item,
_searchScore: calculateRelevanceScore(item, query.trim()),
}));
allPrincipals = sortPrincipalsByRelevance(scoredResults)
.slice(0, searchLimit)
.map((result) => {
const { _searchScore, ...resultWithoutScore } = result;
return resultWithoutScore;
});
res.status(200).json({
query: query.trim(),
limit: searchLimit,
type: typeFilter,
results: allPrincipals,
count: allPrincipals.length,
sources: {
local: allPrincipals.filter((r) => r.source === 'local').length,
entra: allPrincipals.filter((r) => r.source === 'entra').length,
},
});
} catch (error) {
logger.error('Error searching principals:', error);
res.status(500).json({
error: 'Failed to search principals',
details: error.message,
});
}
};
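A hedged usage sketch for the search endpoint named in the @route tag above; the query values and token variable are illustrative, and the `type` string is assumed to match a PrincipalType value:

// Illustrative only: call from an async context with a valid user token.
const params = new URLSearchParams({ q: 'jane', limit: '10', type: 'user' });
const resp = await fetch(`/api/permissions/search-principals?${params}`, {
  headers: { Authorization: `Bearer ${accessToken}` }, // Bearer token enables the optional Entra ID path
});
const { results, count, sources } = await resp.json(); // e.g. sources: { local: 1, entra: 0 }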
module.exports = {
updateResourcePermissions,
getResourcePermissions,
getResourceRoles,
getUserEffectivePermissions,
searchPrincipals,
};

View File

@@ -1,16 +1,54 @@
const { logger } = require('@librechat/data-schemas');
const { CacheKeys, Constants } = require('librechat-data-provider');
const {
getToolkitKey,
checkPluginAuth,
filterUniquePlugins,
convertMCPToolsToPlugins,
} = require('@librechat/api');
const { CacheKeys, AuthType, Constants } = require('librechat-data-provider');
const { getCustomConfig, getCachedTools } = require('~/server/services/Config');
const { availableTools, toolkits } = require('~/app/clients/tools');
const { getToolkitKey } = require('~/server/services/ToolService');
const { getMCPManager, getFlowStateManager } = require('~/config');
const { availableTools } = require('~/app/clients/tools');
const { getLogStores } = require('~/cache');
/**
* Filters out duplicate plugins from the list of plugins.
*
* @param {TPlugin[]} plugins The list of plugins to filter.
* @returns {TPlugin[]} The list of plugins with duplicates removed.
*/
const filterUniquePlugins = (plugins) => {
const seen = new Set();
return plugins.filter((plugin) => {
const duplicate = seen.has(plugin.pluginKey);
seen.add(plugin.pluginKey);
return !duplicate;
});
};
/**
* Determines if a plugin is authenticated by checking if all required authentication fields have non-empty values.
* Supports alternate authentication fields, allowing validation against multiple possible environment variables.
*
* @param {TPlugin} plugin The plugin object containing the authentication configuration.
* @returns {boolean} True if the plugin is authenticated for all required fields, false otherwise.
*/
const checkPluginAuth = (plugin) => {
if (!plugin.authConfig || plugin.authConfig.length === 0) {
return false;
}
return plugin.authConfig.every((authFieldObj) => {
const authFieldOptions = authFieldObj.authField.split('||');
let isFieldAuthenticated = false;
for (const fieldOption of authFieldOptions) {
const envValue = process.env[fieldOption];
if (envValue && envValue.trim() !== '' && envValue !== AuthType.USER_PROVIDED) {
isFieldAuthenticated = true;
break;
}
}
return isFieldAuthenticated;
});
};
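A short illustration of the alternate-field syntax this helper handles; the plugin and environment variable names below are made up for the example:

// Illustrative only: authField may list alternates separated by '||'.
// checkPluginAuth returns true when at least one alternate is set to a
// non-empty value that is not AuthType.USER_PROVIDED.
const examplePlugin = {
  pluginKey: 'example-plugin',
  authConfig: [{ authField: 'EXAMPLE_API_KEY||EXAMPLE_TOKEN', label: 'Example credential' }],
};
process.env.EXAMPLE_TOKEN = 'abc123';
console.log(checkPluginAuth(examplePlugin)); // true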
const getAvailablePluginsController = async (req, res) => {
try {
const cache = getLogStores(CacheKeys.CONFIG_STORE);
@@ -105,9 +143,9 @@ const getAvailableTools = async (req, res) => {
const cache = getLogStores(CacheKeys.CONFIG_STORE);
const cachedToolsArray = await cache.get(CacheKeys.TOOLS);
const cachedUserTools = await getCachedTools({ userId });
const userPlugins = convertMCPToolsToPlugins({ functionTools: cachedUserTools, customConfig });
const userPlugins = convertMCPToolsToPlugins(cachedUserTools, customConfig);
if (cachedToolsArray != null && userPlugins != null) {
if (cachedToolsArray && userPlugins) {
const dedupedTools = filterUniquePlugins([...userPlugins, ...cachedToolsArray]);
res.status(200).json(dedupedTools);
return;
@@ -147,9 +185,7 @@ const getAvailableTools = async (req, res) => {
const isToolDefined = toolDefinitions[plugin.pluginKey] !== undefined;
const isToolkit =
plugin.toolkit === true &&
Object.keys(toolDefinitions).some(
(key) => getToolkitKey({ toolkits, toolName: key }) === plugin.pluginKey,
);
Object.keys(toolDefinitions).some((key) => getToolkitKey(key) === plugin.pluginKey);
if (!isToolDefined && !isToolkit) {
continue;
@@ -199,6 +235,58 @@ const getAvailableTools = async (req, res) => {
}
};
/**
* Converts MCP function format tools to plugin format
* @param {Object} functionTools - Object with function format tools
* @param {Object} customConfig - Custom configuration for MCP servers
* @returns {Array} Array of plugin objects
*/
function convertMCPToolsToPlugins(functionTools, customConfig) {
const plugins = [];
for (const [toolKey, toolData] of Object.entries(functionTools)) {
if (!toolData.function || !toolKey.includes(Constants.mcp_delimiter)) {
continue;
}
const functionData = toolData.function;
const parts = toolKey.split(Constants.mcp_delimiter);
const serverName = parts[parts.length - 1];
const serverConfig = customConfig?.mcpServers?.[serverName];
const plugin = {
name: parts[0], // Use the tool name without server suffix
pluginKey: toolKey,
description: functionData.description || '',
authenticated: true,
icon: serverConfig?.iconPath,
};
// Build authConfig for MCP tools
if (!serverConfig?.customUserVars) {
plugin.authConfig = [];
plugins.push(plugin);
continue;
}
const customVarKeys = Object.keys(serverConfig.customUserVars);
if (customVarKeys.length === 0) {
plugin.authConfig = [];
} else {
plugin.authConfig = Object.entries(serverConfig.customUserVars).map(([key, value]) => ({
authField: key,
label: value.title || key,
description: value.description || '',
}));
}
plugins.push(plugin);
}
return plugins;
}
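For orientation, a hedged example of the input and output shapes this converter handles; the server name, tool name, and icon path are placeholders:

// Illustrative only: MCP tools are keyed as `${toolName}${Constants.mcp_delimiter}${serverName}`.
const functionTools = {
  [`search${Constants.mcp_delimiter}my-server`]: {
    type: 'function',
    function: { name: 'search', description: 'Search things' },
  },
};
const customConfig = {
  mcpServers: { 'my-server': { iconPath: '/icons/my-server.png' } }, // no customUserVars, so authConfig stays []
};
// Returns one plugin:
// { name: 'search', pluginKey: 'search' + Constants.mcp_delimiter + 'my-server',
//   description: 'Search things', authenticated: true, icon: '/icons/my-server.png', authConfig: [] }
const plugins = convertMCPToolsToPlugins(functionTools, customConfig);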
module.exports = {
getAvailableTools,
getAvailablePluginsController,

View File

@@ -28,211 +28,19 @@ jest.mock('~/config', () => ({
jest.mock('~/app/clients/tools', () => ({
availableTools: [],
toolkits: [],
}));
jest.mock('~/cache', () => ({
getLogStores: jest.fn(),
}));
jest.mock('@librechat/api', () => ({
getToolkitKey: jest.fn(),
checkPluginAuth: jest.fn(),
filterUniquePlugins: jest.fn(),
convertMCPToolsToPlugins: jest.fn(),
}));
// Import the actual module with the function we want to test
const { getAvailableTools, getAvailablePluginsController } = require('./PluginController');
const {
filterUniquePlugins,
checkPluginAuth,
convertMCPToolsToPlugins,
getToolkitKey,
} = require('@librechat/api');
const { getAvailableTools } = require('./PluginController');
describe('PluginController', () => {
let mockReq, mockRes, mockCache;
beforeEach(() => {
jest.clearAllMocks();
mockReq = { user: { id: 'test-user-id' } };
mockRes = { status: jest.fn().mockReturnThis(), json: jest.fn() };
mockCache = { get: jest.fn(), set: jest.fn() };
getLogStores.mockReturnValue(mockCache);
});
describe('getAvailablePluginsController', () => {
beforeEach(() => {
mockReq.app = { locals: { filteredTools: [], includedTools: [] } };
});
it('should use filterUniquePlugins to remove duplicate plugins', async () => {
const mockPlugins = [
{ name: 'Plugin1', pluginKey: 'key1', description: 'First' },
{ name: 'Plugin2', pluginKey: 'key2', description: 'Second' },
];
mockCache.get.mockResolvedValue(null);
filterUniquePlugins.mockReturnValue(mockPlugins);
checkPluginAuth.mockReturnValue(true);
await getAvailablePluginsController(mockReq, mockRes);
expect(filterUniquePlugins).toHaveBeenCalled();
expect(mockRes.status).toHaveBeenCalledWith(200);
// The response includes authenticated: true for each plugin when checkPluginAuth returns true
expect(mockRes.json).toHaveBeenCalledWith([
{ name: 'Plugin1', pluginKey: 'key1', description: 'First', authenticated: true },
{ name: 'Plugin2', pluginKey: 'key2', description: 'Second', authenticated: true },
]);
});
it('should use checkPluginAuth to verify plugin authentication', async () => {
const mockPlugin = { name: 'Plugin1', pluginKey: 'key1', description: 'First' };
mockCache.get.mockResolvedValue(null);
filterUniquePlugins.mockReturnValue([mockPlugin]);
checkPluginAuth.mockReturnValueOnce(true);
await getAvailablePluginsController(mockReq, mockRes);
expect(checkPluginAuth).toHaveBeenCalledWith(mockPlugin);
const responseData = mockRes.json.mock.calls[0][0];
expect(responseData[0].authenticated).toBe(true);
});
it('should return cached plugins when available', async () => {
const cachedPlugins = [
{ name: 'CachedPlugin', pluginKey: 'cached', description: 'Cached plugin' },
];
mockCache.get.mockResolvedValue(cachedPlugins);
await getAvailablePluginsController(mockReq, mockRes);
expect(filterUniquePlugins).not.toHaveBeenCalled();
expect(checkPluginAuth).not.toHaveBeenCalled();
expect(mockRes.json).toHaveBeenCalledWith(cachedPlugins);
});
it('should filter plugins based on includedTools', async () => {
const mockPlugins = [
{ name: 'Plugin1', pluginKey: 'key1', description: 'First' },
{ name: 'Plugin2', pluginKey: 'key2', description: 'Second' },
];
mockReq.app.locals.includedTools = ['key1'];
mockCache.get.mockResolvedValue(null);
filterUniquePlugins.mockReturnValue(mockPlugins);
checkPluginAuth.mockReturnValue(false);
await getAvailablePluginsController(mockReq, mockRes);
const responseData = mockRes.json.mock.calls[0][0];
expect(responseData).toHaveLength(1);
expect(responseData[0].pluginKey).toBe('key1');
});
});
describe('getAvailableTools', () => {
it('should use convertMCPToolsToPlugins for user-specific MCP tools', async () => {
const mockUserTools = {
[`tool1${Constants.mcp_delimiter}server1`]: {
function: { name: 'tool1', description: 'Tool 1' },
},
};
const mockConvertedPlugins = [
{
name: 'tool1',
pluginKey: `tool1${Constants.mcp_delimiter}server1`,
description: 'Tool 1',
},
];
mockCache.get.mockResolvedValue(null);
getCachedTools.mockResolvedValueOnce(mockUserTools);
convertMCPToolsToPlugins.mockReturnValue(mockConvertedPlugins);
filterUniquePlugins.mockImplementation((plugins) => plugins);
getCustomConfig.mockResolvedValue(null);
await getAvailableTools(mockReq, mockRes);
expect(convertMCPToolsToPlugins).toHaveBeenCalledWith({
functionTools: mockUserTools,
customConfig: null,
});
});
it('should use filterUniquePlugins to deduplicate combined tools', async () => {
const mockUserPlugins = [
{ name: 'UserTool', pluginKey: 'user-tool', description: 'User tool' },
];
const mockManifestPlugins = [
{ name: 'ManifestTool', pluginKey: 'manifest-tool', description: 'Manifest tool' },
];
mockCache.get.mockResolvedValue(mockManifestPlugins);
getCachedTools.mockResolvedValueOnce({});
convertMCPToolsToPlugins.mockReturnValue(mockUserPlugins);
filterUniquePlugins.mockReturnValue([...mockUserPlugins, ...mockManifestPlugins]);
getCustomConfig.mockResolvedValue(null);
await getAvailableTools(mockReq, mockRes);
// Should be called to deduplicate the combined array
expect(filterUniquePlugins).toHaveBeenLastCalledWith([
...mockUserPlugins,
...mockManifestPlugins,
]);
});
it('should use checkPluginAuth to verify authentication status', async () => {
const mockPlugin = { name: 'Tool1', pluginKey: 'tool1', description: 'Tool 1' };
mockCache.get.mockResolvedValue(null);
getCachedTools.mockResolvedValue({});
convertMCPToolsToPlugins.mockReturnValue([]);
filterUniquePlugins.mockReturnValue([mockPlugin]);
checkPluginAuth.mockReturnValue(true);
getCustomConfig.mockResolvedValue(null);
// Mock getCachedTools second call to return tool definitions
getCachedTools.mockResolvedValueOnce({}).mockResolvedValueOnce({ tool1: true });
await getAvailableTools(mockReq, mockRes);
expect(checkPluginAuth).toHaveBeenCalledWith(mockPlugin);
});
it('should use getToolkitKey for toolkit validation', async () => {
const mockToolkit = {
name: 'Toolkit1',
pluginKey: 'toolkit1',
description: 'Toolkit 1',
toolkit: true,
};
mockCache.get.mockResolvedValue(null);
getCachedTools.mockResolvedValue({});
convertMCPToolsToPlugins.mockReturnValue([]);
filterUniquePlugins.mockReturnValue([mockToolkit]);
checkPluginAuth.mockReturnValue(false);
getToolkitKey.mockReturnValue('toolkit1');
getCustomConfig.mockResolvedValue(null);
// Mock getCachedTools second call to return tool definitions
getCachedTools.mockResolvedValueOnce({}).mockResolvedValueOnce({
toolkit1_function: true,
});
await getAvailableTools(mockReq, mockRes);
expect(getToolkitKey).toHaveBeenCalled();
});
});
describe('plugin.icon behavior', () => {
let mockReq, mockRes, mockCache;
const callGetAvailableToolsWithMCPServer = async (mcpServers) => {
mockCache.get.mockResolvedValue(null);
getCustomConfig.mockResolvedValue({ mcpServers });
@@ -242,22 +50,7 @@ describe('PluginController', () => {
function: { name: 'test-tool', description: 'A test tool' },
},
};
const mockConvertedPlugin = {
name: 'test-tool',
pluginKey: `test-tool${Constants.mcp_delimiter}test-server`,
description: 'A test tool',
icon: mcpServers['test-server']?.iconPath,
authenticated: true,
authConfig: [],
};
getCachedTools.mockResolvedValueOnce(functionTools);
convertMCPToolsToPlugins.mockReturnValue([mockConvertedPlugin]);
filterUniquePlugins.mockImplementation((plugins) => plugins);
checkPluginAuth.mockReturnValue(true);
getToolkitKey.mockReturnValue(undefined);
getCachedTools.mockResolvedValueOnce({
[`test-tool${Constants.mcp_delimiter}test-server`]: true,
});
@@ -267,6 +60,14 @@ describe('PluginController', () => {
return responseData.find((tool) => tool.name === 'test-tool');
};
beforeEach(() => {
jest.clearAllMocks();
mockReq = { user: { id: 'test-user-id' } };
mockRes = { status: jest.fn().mockReturnThis(), json: jest.fn() };
mockCache = { get: jest.fn(), set: jest.fn() };
getLogStores.mockReturnValue(mockCache);
});
it('should set plugin.icon when iconPath is defined', async () => {
const mcpServers = {
'test-server': {
@@ -285,236 +86,4 @@ describe('PluginController', () => {
expect(testTool.icon).toBeUndefined();
});
});
describe('helper function integration', () => {
it('should properly handle MCP tools with custom user variables', async () => {
const customConfig = {
mcpServers: {
'test-server': {
customUserVars: {
API_KEY: { title: 'API Key', description: 'Your API key' },
},
},
},
};
// We need to test the actual flow where MCP manager tools are included
const mcpManagerTools = [
{
name: 'tool1',
pluginKey: `tool1${Constants.mcp_delimiter}test-server`,
description: 'Tool 1',
authenticated: true,
},
];
// Mock the MCP manager to return tools
const mockMCPManager = {
loadManifestTools: jest.fn().mockResolvedValue(mcpManagerTools),
};
require('~/config').getMCPManager.mockReturnValue(mockMCPManager);
mockCache.get.mockResolvedValue(null);
getCustomConfig.mockResolvedValue(customConfig);
// First call returns user tools (empty in this case)
getCachedTools.mockResolvedValueOnce({});
// Mock convertMCPToolsToPlugins to return empty array for user tools
convertMCPToolsToPlugins.mockReturnValue([]);
// Mock filterUniquePlugins to pass through
filterUniquePlugins.mockImplementation((plugins) => plugins || []);
// Mock checkPluginAuth
checkPluginAuth.mockReturnValue(true);
// Second call returns tool definitions
getCachedTools.mockResolvedValueOnce({
[`tool1${Constants.mcp_delimiter}test-server`]: true,
});
await getAvailableTools(mockReq, mockRes);
const responseData = mockRes.json.mock.calls[0][0];
// Find the MCP tool in the response
const mcpTool = responseData.find(
(tool) => tool.pluginKey === `tool1${Constants.mcp_delimiter}test-server`,
);
// The actual implementation adds authConfig and sets authenticated to false when customUserVars exist
expect(mcpTool).toBeDefined();
expect(mcpTool.authConfig).toEqual([
{ authField: 'API_KEY', label: 'API Key', description: 'Your API key' },
]);
expect(mcpTool.authenticated).toBe(false);
});
it('should handle error cases gracefully', async () => {
mockCache.get.mockRejectedValue(new Error('Cache error'));
await getAvailableTools(mockReq, mockRes);
expect(mockRes.status).toHaveBeenCalledWith(500);
expect(mockRes.json).toHaveBeenCalledWith({ message: 'Cache error' });
});
});
describe('edge cases with undefined/null values', () => {
it('should handle undefined cache gracefully', async () => {
getLogStores.mockReturnValue(undefined);
await getAvailableTools(mockReq, mockRes);
expect(mockRes.status).toHaveBeenCalledWith(500);
});
it('should handle null cachedTools and cachedUserTools', async () => {
mockCache.get.mockResolvedValue(null);
getCachedTools.mockResolvedValue(null);
convertMCPToolsToPlugins.mockReturnValue(undefined);
filterUniquePlugins.mockImplementation((plugins) => plugins || []);
getCustomConfig.mockResolvedValue(null);
await getAvailableTools(mockReq, mockRes);
expect(convertMCPToolsToPlugins).toHaveBeenCalledWith({
functionTools: null,
customConfig: null,
});
});
it('should handle when getCachedTools returns undefined', async () => {
mockCache.get.mockResolvedValue(null);
getCachedTools.mockResolvedValue(undefined);
convertMCPToolsToPlugins.mockReturnValue(undefined);
filterUniquePlugins.mockImplementation((plugins) => plugins || []);
getCustomConfig.mockResolvedValue(null);
checkPluginAuth.mockReturnValue(false);
// Mock getCachedTools to return undefined for both calls
getCachedTools.mockReset();
getCachedTools.mockResolvedValueOnce(undefined).mockResolvedValueOnce(undefined);
await getAvailableTools(mockReq, mockRes);
expect(convertMCPToolsToPlugins).toHaveBeenCalledWith({
functionTools: undefined,
customConfig: null,
});
});
it('should handle cachedToolsArray and userPlugins both being defined', async () => {
const cachedTools = [{ name: 'CachedTool', pluginKey: 'cached-tool', description: 'Cached' }];
const userTools = {
'user-tool': { function: { name: 'user-tool', description: 'User tool' } },
};
const userPlugins = [{ name: 'UserTool', pluginKey: 'user-tool', description: 'User tool' }];
mockCache.get.mockResolvedValue(cachedTools);
getCachedTools.mockResolvedValue(userTools);
convertMCPToolsToPlugins.mockReturnValue(userPlugins);
filterUniquePlugins.mockReturnValue([...userPlugins, ...cachedTools]);
await getAvailableTools(mockReq, mockRes);
expect(mockRes.status).toHaveBeenCalledWith(200);
expect(mockRes.json).toHaveBeenCalledWith([...userPlugins, ...cachedTools]);
});
it('should handle empty toolDefinitions object', async () => {
mockCache.get.mockResolvedValue(null);
getCachedTools.mockResolvedValueOnce({}).mockResolvedValueOnce({});
convertMCPToolsToPlugins.mockReturnValue([]);
filterUniquePlugins.mockImplementation((plugins) => plugins || []);
getCustomConfig.mockResolvedValue(null);
checkPluginAuth.mockReturnValue(true);
await getAvailableTools(mockReq, mockRes);
// With empty tool definitions, no tools should be in the final output
expect(mockRes.json).toHaveBeenCalledWith([]);
});
it('should handle MCP tools without customUserVars', async () => {
const customConfig = {
mcpServers: {
'test-server': {
// No customUserVars defined
},
},
};
const mockUserTools = {
[`tool1${Constants.mcp_delimiter}test-server`]: {
function: { name: 'tool1', description: 'Tool 1' },
},
};
mockCache.get.mockResolvedValue(null);
getCustomConfig.mockResolvedValue(customConfig);
getCachedTools.mockResolvedValueOnce(mockUserTools);
const mockPlugin = {
name: 'tool1',
pluginKey: `tool1${Constants.mcp_delimiter}test-server`,
description: 'Tool 1',
authenticated: true,
authConfig: [],
};
convertMCPToolsToPlugins.mockReturnValue([mockPlugin]);
filterUniquePlugins.mockImplementation((plugins) => plugins);
checkPluginAuth.mockReturnValue(true);
getCachedTools.mockResolvedValueOnce({
[`tool1${Constants.mcp_delimiter}test-server`]: true,
});
await getAvailableTools(mockReq, mockRes);
const responseData = mockRes.json.mock.calls[0][0];
expect(responseData[0].authenticated).toBe(true);
// The actual implementation doesn't set authConfig on tools without customUserVars
expect(responseData[0].authConfig).toEqual([]);
});
it('should handle req.app.locals with undefined filteredTools and includedTools', async () => {
mockReq.app = { locals: {} };
mockCache.get.mockResolvedValue(null);
filterUniquePlugins.mockReturnValue([]);
checkPluginAuth.mockReturnValue(false);
await getAvailablePluginsController(mockReq, mockRes);
expect(mockRes.status).toHaveBeenCalledWith(200);
expect(mockRes.json).toHaveBeenCalledWith([]);
});
it('should handle toolkit with undefined toolDefinitions keys', async () => {
const mockToolkit = {
name: 'Toolkit1',
pluginKey: 'toolkit1',
description: 'Toolkit 1',
toolkit: true,
};
mockCache.get.mockResolvedValue(null);
getCachedTools.mockResolvedValue({});
convertMCPToolsToPlugins.mockReturnValue([]);
filterUniquePlugins.mockReturnValue([mockToolkit]);
checkPluginAuth.mockReturnValue(false);
getToolkitKey.mockReturnValue(undefined);
getCustomConfig.mockResolvedValue(null);
// Mock getCachedTools second call to return null
getCachedTools.mockResolvedValueOnce({}).mockResolvedValueOnce(null);
await getAvailableTools(mockReq, mockRes);
// Should handle null toolDefinitions gracefully
expect(mockRes.status).toHaveBeenCalledWith(200);
});
});
});

View File

@@ -99,36 +99,10 @@ const confirm2FA = async (req, res) => {
/**
* Disable 2FA by clearing the stored secret and backup codes.
* Requires verification with either TOTP token or backup code if 2FA is fully enabled.
*/
const disable2FA = async (req, res) => {
try {
const userId = req.user.id;
const { token, backupCode } = req.body;
const user = await getUserById(userId);
if (!user || !user.totpSecret) {
return res.status(400).json({ message: '2FA is not setup for this user' });
}
if (user.twoFactorEnabled) {
const secret = await getTOTPSecret(user.totpSecret);
let isVerified = false;
if (token) {
isVerified = await verifyTOTP(secret, token);
} else if (backupCode) {
isVerified = await verifyBackupCode({ user, backupCode });
} else {
return res
.status(400)
.json({ message: 'Either token or backup code is required to disable 2FA' });
}
if (!isVerified) {
return res.status(401).json({ message: 'Invalid token or backup code' });
}
}
await updateUser(userId, { totpSecret: null, backupCodes: [], twoFactorEnabled: false });
return res.status(200).json();
} catch (err) {

View File

@@ -1,5 +1,5 @@
const { logger } = require('@librechat/data-schemas');
const { webSearchKeys, extractWebSearchEnvVars, normalizeHttpError } = require('@librechat/api');
const { webSearchKeys, extractWebSearchEnvVars } = require('@librechat/api');
const {
getFiles,
updateUser,
@@ -89,8 +89,8 @@ const updateUserPluginsController = async (req, res) => {
if (userPluginsService instanceof Error) {
logger.error('[userPluginsService]', userPluginsService);
const { status, message } = normalizeHttpError(userPluginsService);
return res.status(status).send({ message });
const { status, message } = userPluginsService;
res.status(status).send({ message });
}
}
@@ -137,7 +137,7 @@ const updateUserPluginsController = async (req, res) => {
authService = await updateUserPluginAuth(user.id, keys[i], pluginKey, values[i]);
if (authService instanceof Error) {
logger.error('[authService]', authService);
({ status, message } = normalizeHttpError(authService));
({ status, message } = authService);
}
}
} else if (action === 'uninstall') {
@@ -151,7 +151,7 @@ const updateUserPluginsController = async (req, res) => {
`[authService] Error deleting all auth for MCP tool ${pluginKey}:`,
authService,
);
({ status, message } = normalizeHttpError(authService));
({ status, message } = authService);
}
} else {
// This handles:
@@ -163,7 +163,7 @@ const updateUserPluginsController = async (req, res) => {
authService = await deleteUserPluginAuth(user.id, keys[i]); // Deletes by authField name
if (authService instanceof Error) {
logger.error('[authService] Error deleting specific auth key:', authService);
({ status, message } = normalizeHttpError(authService));
({ status, message } = authService);
}
}
}
@@ -193,8 +193,7 @@ const updateUserPluginsController = async (req, res) => {
return res.status(status).send();
}
const normalized = normalizeHttpError({ status, message });
return res.status(normalized.status).send({ message: normalized.message });
res.status(status).send({ message });
} catch (err) {
logger.error('[updateUserPluginsController]', err);
return res.status(500).json({ message: 'Something went wrong.' });

View File

@@ -11,7 +11,6 @@ const {
handleToolCalls,
ChatModelStreamHandler,
} = require('@librechat/agents');
const { processFileCitations } = require('~/server/services/Files/Citations');
const { processCodeOutput } = require('~/server/services/Files/Code/process');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { saveBase64Image } = require('~/server/services/Files/process');
@@ -239,31 +238,6 @@ function createToolEndCallback({ req, res, artifactPromises }) {
return;
}
if (output.artifact[Tools.file_search]) {
artifactPromises.push(
(async () => {
const user = req.user;
const attachment = await processFileCitations({
user,
metadata,
toolArtifact: output.artifact,
toolCallId: output.tool_call_id,
});
if (!attachment) {
return null;
}
if (!res.headersSent) {
return attachment;
}
res.write(`event: attachment\ndata: ${JSON.stringify(attachment)}\n\n`);
return attachment;
})().catch((error) => {
logger.error('Error processing file citations:', error);
return null;
}),
);
}
if (output.artifact[Tools.web_search]) {
artifactPromises.push(
(async () => {

View File

@@ -402,34 +402,6 @@ class AgentClient extends BaseClient {
return result;
}
/**
* Creates a promise that resolves with the memory promise result or undefined after a timeout
* @param {Promise<(TAttachment | null)[] | undefined>} memoryPromise - The memory promise to await
* @param {number} timeoutMs - Timeout in milliseconds (default: 3000)
* @returns {Promise<(TAttachment | null)[] | undefined>}
*/
async awaitMemoryWithTimeout(memoryPromise, timeoutMs = 3000) {
if (!memoryPromise) {
return;
}
try {
const timeoutPromise = new Promise((_, reject) =>
setTimeout(() => reject(new Error('Memory processing timeout')), timeoutMs),
);
const attachments = await Promise.race([memoryPromise, timeoutPromise]);
return attachments;
} catch (error) {
if (error.message === 'Memory processing timeout') {
logger.warn('[AgentClient] Memory processing timed out after 3 seconds');
} else {
logger.error('[AgentClient] Error processing memory:', error);
}
return;
}
}
/**
* @returns {Promise<string | undefined>}
*/
@@ -838,7 +810,7 @@ class AgentClient extends BaseClient {
if (noSystemMessages === true && systemContent?.length) {
const latestMessageContent = _messages.pop().content;
if (typeof latestMessageContent !== 'string') {
if (typeof latestMessage !== 'string') {
latestMessageContent[0].text = [systemContent, latestMessageContent[0].text].join('\n');
_messages.push(new HumanMessage({ content: latestMessageContent }));
} else {
@@ -1030,11 +1002,12 @@ class AgentClient extends BaseClient {
});
try {
const attachments = await this.awaitMemoryWithTimeout(memoryPromise);
if (attachments && attachments.length > 0) {
this.artifactPromises.push(...attachments);
if (memoryPromise) {
const attachments = await memoryPromise;
if (attachments && attachments.length > 0) {
this.artifactPromises.push(...attachments);
}
}
await this.recordCollectedUsage({ context: 'message' });
} catch (err) {
logger.error(
@@ -1043,9 +1016,11 @@ class AgentClient extends BaseClient {
);
}
} catch (err) {
const attachments = await this.awaitMemoryWithTimeout(memoryPromise);
if (attachments && attachments.length > 0) {
this.artifactPromises.push(...attachments);
if (memoryPromise) {
const attachments = await memoryPromise;
if (attachments && attachments.length > 0) {
this.artifactPromises.push(...attachments);
}
}
logger.error(
'[api/server/controllers/agents/client.js #sendCompletion] Operation aborted',
@@ -1147,16 +1122,11 @@ class AgentClient extends BaseClient {
clientOptions.configuration = options.configOptions;
}
const shouldRemoveMaxTokens = /\b(o\d|gpt-[5-9])\b/i.test(clientOptions.model);
if (shouldRemoveMaxTokens && clientOptions.maxTokens != null) {
delete clientOptions.maxTokens;
} else if (!shouldRemoveMaxTokens && !clientOptions.maxTokens) {
// Ensure maxTokens is set for non-o1 models
if (!/\b(o\d)\b/i.test(clientOptions.model) && !clientOptions.maxTokens) {
clientOptions.maxTokens = 75;
}
if (shouldRemoveMaxTokens && clientOptions?.modelKwargs?.max_completion_tokens != null) {
delete clientOptions.modelKwargs.max_completion_tokens;
} else if (shouldRemoveMaxTokens && clientOptions?.modelKwargs?.max_output_tokens != null) {
delete clientOptions.modelKwargs.max_output_tokens;
} else if (/\b(o\d)\b/i.test(clientOptions.model) && clientOptions.maxTokens != null) {
delete clientOptions.maxTokens;
}
clientOptions = Object.assign(

View File

@@ -728,239 +728,6 @@ describe('AgentClient - titleConvo', () => {
});
});
describe('getOptions method - GPT-5+ model handling', () => {
let mockReq;
let mockRes;
let mockAgent;
let mockOptions;
beforeEach(() => {
jest.clearAllMocks();
mockAgent = {
id: 'agent-123',
endpoint: EModelEndpoint.openAI,
provider: EModelEndpoint.openAI,
model_parameters: {
model: 'gpt-5',
},
};
mockReq = {
app: {
locals: {},
},
user: {
id: 'user-123',
},
};
mockRes = {};
mockOptions = {
req: mockReq,
res: mockRes,
agent: mockAgent,
};
client = new AgentClient(mockOptions);
});
it('should move maxTokens to modelKwargs.max_completion_tokens for GPT-5 models', () => {
const clientOptions = {
model: 'gpt-5',
maxTokens: 2048,
temperature: 0.7,
};
// Simulate the getOptions logic that handles GPT-5+ models
if (/\bgpt-[5-9]\b/i.test(clientOptions.model) && clientOptions.maxTokens != null) {
clientOptions.modelKwargs = clientOptions.modelKwargs ?? {};
clientOptions.modelKwargs.max_completion_tokens = clientOptions.maxTokens;
delete clientOptions.maxTokens;
}
expect(clientOptions.maxTokens).toBeUndefined();
expect(clientOptions.modelKwargs).toBeDefined();
expect(clientOptions.modelKwargs.max_completion_tokens).toBe(2048);
expect(clientOptions.temperature).toBe(0.7); // Other options should remain
});
it('should move maxTokens to modelKwargs.max_output_tokens for GPT-5 models with useResponsesApi', () => {
const clientOptions = {
model: 'gpt-5',
maxTokens: 2048,
temperature: 0.7,
useResponsesApi: true,
};
if (/\bgpt-[5-9]\b/i.test(clientOptions.model) && clientOptions.maxTokens != null) {
clientOptions.modelKwargs = clientOptions.modelKwargs ?? {};
const paramName =
clientOptions.useResponsesApi === true ? 'max_output_tokens' : 'max_completion_tokens';
clientOptions.modelKwargs[paramName] = clientOptions.maxTokens;
delete clientOptions.maxTokens;
}
expect(clientOptions.maxTokens).toBeUndefined();
expect(clientOptions.modelKwargs).toBeDefined();
expect(clientOptions.modelKwargs.max_output_tokens).toBe(2048);
expect(clientOptions.temperature).toBe(0.7); // Other options should remain
});
it('should handle GPT-5+ models with existing modelKwargs', () => {
const clientOptions = {
model: 'gpt-6',
maxTokens: 1500,
temperature: 0.8,
modelKwargs: {
customParam: 'value',
},
};
// Simulate the getOptions logic
if (/\bgpt-[5-9]\b/i.test(clientOptions.model) && clientOptions.maxTokens != null) {
clientOptions.modelKwargs = clientOptions.modelKwargs ?? {};
clientOptions.modelKwargs.max_completion_tokens = clientOptions.maxTokens;
delete clientOptions.maxTokens;
}
expect(clientOptions.maxTokens).toBeUndefined();
expect(clientOptions.modelKwargs).toEqual({
customParam: 'value',
max_completion_tokens: 1500,
});
});
it('should not modify maxTokens for non-GPT-5+ models', () => {
const clientOptions = {
model: 'gpt-4',
maxTokens: 2048,
temperature: 0.7,
};
// Simulate the getOptions logic
if (/\bgpt-[5-9]\b/i.test(clientOptions.model) && clientOptions.maxTokens != null) {
clientOptions.modelKwargs = clientOptions.modelKwargs ?? {};
clientOptions.modelKwargs.max_completion_tokens = clientOptions.maxTokens;
delete clientOptions.maxTokens;
}
// Should not be modified since it's GPT-4
expect(clientOptions.maxTokens).toBe(2048);
expect(clientOptions.modelKwargs).toBeUndefined();
});
it('should handle various GPT-5+ model formats', () => {
const testCases = [
{ model: 'gpt-5', shouldTransform: true },
{ model: 'gpt-5-turbo', shouldTransform: true },
{ model: 'gpt-6', shouldTransform: true },
{ model: 'gpt-7-preview', shouldTransform: true },
{ model: 'gpt-8', shouldTransform: true },
{ model: 'gpt-9-mini', shouldTransform: true },
{ model: 'gpt-4', shouldTransform: false },
{ model: 'gpt-4o', shouldTransform: false },
{ model: 'gpt-3.5-turbo', shouldTransform: false },
{ model: 'claude-3', shouldTransform: false },
];
testCases.forEach(({ model, shouldTransform }) => {
const clientOptions = {
model,
maxTokens: 1000,
};
// Simulate the getOptions logic
if (/\bgpt-[5-9]\b/i.test(clientOptions.model) && clientOptions.maxTokens != null) {
clientOptions.modelKwargs = clientOptions.modelKwargs ?? {};
clientOptions.modelKwargs.max_completion_tokens = clientOptions.maxTokens;
delete clientOptions.maxTokens;
}
if (shouldTransform) {
expect(clientOptions.maxTokens).toBeUndefined();
expect(clientOptions.modelKwargs?.max_completion_tokens).toBe(1000);
} else {
expect(clientOptions.maxTokens).toBe(1000);
expect(clientOptions.modelKwargs).toBeUndefined();
}
});
});
it('should not swap max token param for older models when using useResponsesApi', () => {
const testCases = [
{ model: 'gpt-5', shouldTransform: true },
{ model: 'gpt-5-turbo', shouldTransform: true },
{ model: 'gpt-6', shouldTransform: true },
{ model: 'gpt-7-preview', shouldTransform: true },
{ model: 'gpt-8', shouldTransform: true },
{ model: 'gpt-9-mini', shouldTransform: true },
{ model: 'gpt-4', shouldTransform: false },
{ model: 'gpt-4o', shouldTransform: false },
{ model: 'gpt-3.5-turbo', shouldTransform: false },
{ model: 'claude-3', shouldTransform: false },
];
testCases.forEach(({ model, shouldTransform }) => {
const clientOptions = {
model,
maxTokens: 1000,
useResponsesApi: true,
};
if (/\bgpt-[5-9]\b/i.test(clientOptions.model) && clientOptions.maxTokens != null) {
clientOptions.modelKwargs = clientOptions.modelKwargs ?? {};
const paramName =
clientOptions.useResponsesApi === true ? 'max_output_tokens' : 'max_completion_tokens';
clientOptions.modelKwargs[paramName] = clientOptions.maxTokens;
delete clientOptions.maxTokens;
}
if (shouldTransform) {
expect(clientOptions.maxTokens).toBeUndefined();
expect(clientOptions.modelKwargs?.max_output_tokens).toBe(1000);
} else {
expect(clientOptions.maxTokens).toBe(1000);
expect(clientOptions.modelKwargs).toBeUndefined();
}
});
});
it('should not transform if maxTokens is null or undefined', () => {
const testCases = [
{ model: 'gpt-5', maxTokens: null },
{ model: 'gpt-5', maxTokens: undefined },
{ model: 'gpt-6', maxTokens: 0 }, // Should transform even if 0
];
testCases.forEach(({ model, maxTokens }, index) => {
const clientOptions = {
model,
maxTokens,
temperature: 0.7,
};
// Simulate the getOptions logic
if (/\bgpt-[5-9]\b/i.test(clientOptions.model) && clientOptions.maxTokens != null) {
clientOptions.modelKwargs = clientOptions.modelKwargs ?? {};
clientOptions.modelKwargs.max_completion_tokens = clientOptions.maxTokens;
delete clientOptions.maxTokens;
}
if (index < 2) {
// null or undefined cases
expect(clientOptions.maxTokens).toBe(maxTokens);
expect(clientOptions.modelKwargs).toBeUndefined();
} else {
// 0 case - should transform
expect(clientOptions.maxTokens).toBeUndefined();
expect(clientOptions.modelKwargs?.max_completion_tokens).toBe(0);
}
});
});
});
describe('runMemory method', () => {
let client;
let mockReq;

View File

@@ -233,26 +233,6 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
);
}
}
// Edge case: sendMessage completed but abort happened during sendCompletion
// We need to ensure a final event is sent
else if (!res.headersSent && !res.finished) {
logger.debug(
'[AgentController] Handling edge case: `sendMessage` completed but aborted during `sendCompletion`',
);
const finalResponse = { ...response };
finalResponse.error = true;
sendEvent(res, {
final: true,
conversation,
title: conversation.title,
requestMessage: userMessage,
responseMessage: finalResponse,
error: { message: 'Request was aborted during completion' },
});
res.end();
}
// Save user message if needed
if (!client.skipSaveUserMessage) {

View File

@@ -5,40 +5,30 @@ const { logger } = require('@librechat/data-schemas');
const { agentCreateSchema, agentUpdateSchema } = require('@librechat/api');
const {
Tools,
SystemRoles,
Constants,
FileSources,
ResourceType,
AccessRoleIds,
PrincipalType,
SystemRoles,
EToolResources,
PermissionBits,
actionDelimiter,
removeNullishValues,
} = require('librechat-data-provider');
const {
getListAgentsByAccess,
countPromotedAgents,
revertAgentVersion,
getAgent,
createAgent,
updateAgent,
deleteAgent,
getAgent,
getListAgents,
} = require('~/models/Agent');
const {
findPubliclyAccessibleResources,
findAccessibleResources,
hasPublicPermission,
grantPermission,
} = require('~/server/services/PermissionService');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { resizeAvatar } = require('~/server/services/Files/images/avatar');
const { getFileStrategy } = require('~/server/utils/getFileStrategy');
const { refreshS3Url } = require('~/server/services/Files/S3/crud');
const { filterFile } = require('~/server/services/Files/process');
const { updateAction, getActions } = require('~/models/Action');
const { getCachedTools } = require('~/server/services/Config');
const { updateAgentProjects } = require('~/models/Agent');
const { getProjectByName } = require('~/models/Project');
const { revertAgentVersion } = require('~/models/Agent');
const { deleteFileByFilter } = require('~/models/File');
const { getCategoriesWithCounts } = require('~/models');
const systemTools = {
[Tools.execute_code]: true,
@@ -52,7 +42,7 @@ const systemTools = {
* @param {ServerRequest} req - The request object.
* @param {AgentCreateParams} req.body - The request body.
* @param {ServerResponse} res - The response object.
* @returns {Promise<Agent>} 201 - success response - application/json
* @returns {Agent} 201 - success response - application/json
*/
const createAgentHandler = async (req, res) => {
try {
@@ -77,27 +67,6 @@ const createAgentHandler = async (req, res) => {
}
const agent = await createAgent(agentData);
// Automatically grant owner permissions to the creator
try {
await grantPermission({
principalType: PrincipalType.USER,
principalId: userId,
resourceType: ResourceType.AGENT,
resourceId: agent._id,
accessRoleId: AccessRoleIds.AGENT_OWNER,
grantedBy: userId,
});
logger.debug(
`[createAgent] Granted owner permissions to user ${userId} for agent ${agent.id}`,
);
} catch (permissionError) {
logger.error(
`[createAgent] Failed to grant owner permissions for agent ${agent.id}:`,
permissionError,
);
}
res.status(201).json(agent);
} catch (error) {
if (error instanceof z.ZodError) {
@@ -120,14 +89,21 @@ const createAgentHandler = async (req, res) => {
* @returns {Promise<Agent>} 200 - success response - application/json
* @returns {Error} 404 - Agent not found
*/
const getAgentHandler = async (req, res, expandProperties = false) => {
const getAgentHandler = async (req, res) => {
try {
const id = req.params.id;
const author = req.user.id;
// Permissions are validated by middleware before calling this function
// Simply load the agent by ID
const agent = await getAgent({ id });
let query = { id, author };
const globalProject = await getProjectByName(Constants.GLOBAL_PROJECT_NAME, ['agentIds']);
if (globalProject && (globalProject.agentIds?.length ?? 0) > 0) {
query = {
$or: [{ id, $in: globalProject.agentIds }, query],
};
}
const agent = await getAgent(query);
if (!agent) {
return res.status(404).json({ error: 'Agent not found' });
@@ -144,45 +120,23 @@ const getAgentHandler = async (req, res, expandProperties = false) => {
}
agent.author = agent.author.toString();
// @deprecated - isCollaborative replaced by ACL permissions
agent.isCollaborative = !!agent.isCollaborative;
// Check if agent is public
const isPublic = await hasPublicPermission({
resourceType: ResourceType.AGENT,
resourceId: agent._id,
requiredPermissions: PermissionBits.VIEW,
});
agent.isPublic = isPublic;
if (agent.author !== author) {
delete agent.author;
}
if (!expandProperties) {
// VIEW permission: Basic agent info only
if (!agent.isCollaborative && agent.author !== author && req.user.role !== SystemRoles.ADMIN) {
return res.status(200).json({
_id: agent._id,
id: agent.id,
name: agent.name,
description: agent.description,
avatar: agent.avatar,
author: agent.author,
provider: agent.provider,
model: agent.model,
projectIds: agent.projectIds,
// @deprecated - isCollaborative replaced by ACL permissions
isCollaborative: agent.isCollaborative,
isPublic: agent.isPublic,
version: agent.version,
// Safe metadata
createdAt: agent.createdAt,
updatedAt: agent.updatedAt,
});
}
// EDIT permission: Full agent details including sensitive configuration
return res.status(200).json(agent);
} catch (error) {
logger.error('[/Agents/:id] Error retrieving agent', error);
@@ -203,22 +157,42 @@ const updateAgentHandler = async (req, res) => {
try {
const id = req.params.id;
const validatedData = agentUpdateSchema.parse(req.body);
const { _id, ...updateData } = removeNullishValues(validatedData);
const { projectIds, removeProjectIds, ...updateData } = removeNullishValues(validatedData);
const isAdmin = req.user.role === SystemRoles.ADMIN;
const existingAgent = await getAgent({ id });
if (!existingAgent) {
return res.status(404).json({ error: 'Agent not found' });
}
const isAuthor = existingAgent.author.toString() === req.user.id;
const hasEditPermission = existingAgent.isCollaborative || isAdmin || isAuthor;
if (!hasEditPermission) {
return res.status(403).json({
error: 'You do not have permission to modify this non-collaborative agent',
});
}
/** @type {boolean} */
const isProjectUpdate = (projectIds?.length ?? 0) > 0 || (removeProjectIds?.length ?? 0) > 0;
let updatedAgent =
Object.keys(updateData).length > 0
? await updateAgent({ id }, updateData, {
updatingUserId: req.user.id,
skipVersioning: isProjectUpdate,
})
: existingAgent;
// Add version count to the response
updatedAgent.version = updatedAgent.versions ? updatedAgent.versions.length : 0;
if (isProjectUpdate) {
updatedAgent = await updateAgentProjects({
user: req.user,
agentId: id,
projectIds,
removeProjectIds,
});
}
if (updatedAgent.author) {
updatedAgent.author = updatedAgent.author.toString();
@@ -344,26 +318,6 @@ const duplicateAgentHandler = async (req, res) => {
newAgentData.actions = agentActions;
const newAgent = await createAgent(newAgentData);
// Automatically grant owner permissions to the duplicator
try {
await grantPermission({
principalType: PrincipalType.USER,
principalId: userId,
resourceType: ResourceType.AGENT,
resourceId: newAgent._id,
accessRoleId: AccessRoleIds.AGENT_OWNER,
grantedBy: userId,
});
logger.debug(
`[duplicateAgent] Granted owner permissions to user ${userId} for duplicated agent ${newAgent.id}`,
);
} catch (permissionError) {
logger.error(
`[duplicateAgent] Failed to grant owner permissions for duplicated agent ${newAgent.id}:`,
permissionError,
);
}
return res.status(201).json({
agent: newAgent,
actions: newActionsList,
@@ -390,7 +344,7 @@ const deleteAgentHandler = async (req, res) => {
if (!agent) {
return res.status(404).json({ error: 'Agent not found' });
}
await deleteAgent({ id });
await deleteAgent({ id, author: req.user.id });
return res.json({ message: 'Agent deleted' });
} catch (error) {
logger.error('[/Agents/:id] Error deleting Agent', error);
@@ -399,7 +353,7 @@ const deleteAgentHandler = async (req, res) => {
};
/**
* Lists agents using ACL-aware permissions (ownership + explicit shares).
*
* @route GET /Agents
* @param {object} req - Express Request
* @param {object} req.query - Request query
@@ -408,65 +362,9 @@ const deleteAgentHandler = async (req, res) => {
*/
const getListAgentsHandler = async (req, res) => {
try {
const userId = req.user.id;
const { category, search, limit, cursor, promoted } = req.query;
let requiredPermission = req.query.requiredPermission;
if (typeof requiredPermission === 'string') {
requiredPermission = parseInt(requiredPermission, 10);
if (isNaN(requiredPermission)) {
requiredPermission = PermissionBits.VIEW;
}
} else if (typeof requiredPermission !== 'number') {
requiredPermission = PermissionBits.VIEW;
}
// Base filter
const filter = {};
// Handle category filter - only apply if category is defined
if (category !== undefined && category.trim() !== '') {
filter.category = category;
}
// Handle promoted filter - only from query param
if (promoted === '1') {
filter.is_promoted = true;
} else if (promoted === '0') {
filter.is_promoted = { $ne: true };
}
// Handle search filter
if (search && search.trim() !== '') {
filter.$or = [
{ name: { $regex: search.trim(), $options: 'i' } },
{ description: { $regex: search.trim(), $options: 'i' } },
];
}
// Get agent IDs the user has VIEW access to via ACL
const accessibleIds = await findAccessibleResources({
userId,
role: req.user.role,
resourceType: ResourceType.AGENT,
requiredPermissions: requiredPermission,
const data = await getListAgents({
author: req.user.id,
});
const publiclyAccessibleIds = await findPubliclyAccessibleResources({
resourceType: ResourceType.AGENT,
requiredPermissions: PermissionBits.VIEW,
});
// Use the new ACL-aware function
const data = await getListAgentsByAccess({
accessibleIds,
otherParams: filter,
limit,
after: cursor,
});
if (data?.data?.length) {
data.data = data.data.map((agent) => {
if (publiclyAccessibleIds.some((id) => id.equals(agent._id))) {
agent.isPublic = true;
}
return agent;
});
}
return res.json(data);
} catch (error) {
logger.error('[/Agents] Error listing Agents', error);
@@ -500,7 +398,7 @@ const uploadAgentAvatarHandler = async (req, res) => {
return res.status(404).json({ error: 'Agent not found' });
}
const isAuthor = existingAgent.author.toString() === req.user.id.toString();
const isAuthor = existingAgent.author.toString() === req.user.id;
const hasEditPermission = existingAgent.isCollaborative || isAdmin || isAuthor;
if (!hasEditPermission) {
@@ -511,7 +409,7 @@ const uploadAgentAvatarHandler = async (req, res) => {
const buffer = await fs.readFile(req.file.path);
const fileStrategy = getFileStrategy(req.app.locals, { isAvatar: true });
const fileStrategy = req.app.locals.fileStrategy;
const resizedBuffer = await resizeAvatar({
userId: req.user.id,
@@ -608,7 +506,7 @@ const revertAgentVersionHandler = async (req, res) => {
return res.status(404).json({ error: 'Agent not found' });
}
const isAuthor = existingAgent.author.toString() === req.user.id.toString();
const isAuthor = existingAgent.author.toString() === req.user.id;
const hasEditPermission = existingAgent.isCollaborative || isAdmin || isAuthor;
if (!hasEditPermission) {
@@ -633,48 +531,7 @@ const revertAgentVersionHandler = async (req, res) => {
res.status(500).json({ error: error.message });
}
};
/**
* Get all agent categories with counts
*
* @param {Object} _req - Express request object (unused)
* @param {Object} res - Express response object
*/
const getAgentCategories = async (_req, res) => {
try {
const categories = await getCategoriesWithCounts();
const promotedCount = await countPromotedAgents();
const formattedCategories = categories.map((category) => ({
value: category.value,
label: category.label,
count: category.agentCount,
description: category.description,
}));
if (promotedCount > 0) {
formattedCategories.unshift({
value: 'promoted',
label: 'Promoted',
count: promotedCount,
description: 'Our recommended agents',
});
}
formattedCategories.push({
value: 'all',
label: 'All',
description: 'All available agents',
});
res.status(200).json(formattedCategories);
} catch (error) {
logger.error('[/Agents/Marketplace] Error fetching agent categories:', error);
res.status(500).json({
error: 'Failed to fetch agent categories',
userMessage: 'Unable to load categories. Please refresh the page.',
suggestion: 'Try refreshing the page or check your network connection',
});
}
};
module.exports = {
createAgent: createAgentHandler,
getAgent: getAgentHandler,
@@ -684,5 +541,4 @@ module.exports = {
getListAgents: getListAgentsHandler,
uploadAgentAvatar: uploadAgentAvatarHandler,
revertAgentVersion: revertAgentVersionHandler,
getAgentCategories,
};
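For orientation, a minimal sketch of how these exported handlers might be mounted on an Express router; the route paths, require paths, and middleware placement below are illustrative assumptions and are not taken from this diff:

const express = require('express');
const v1 = require('./v1'); // the controller above (path assumed from the test file's require)
const { canAccessAgentResource } = require('~/server/middleware'); // assumed export location

const router = express.Router();

// Listing and creation need no per-resource check; per-agent routes gate on permission bits.
router.get('/', v1.getListAgents);
router.post('/', v1.createAgent);
router.get('/:id', canAccessAgentResource({ requiredPermission: 1 }), v1.getAgent); // 1 = VIEW

module.exports = router;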

View File

@@ -1,6 +1,5 @@
const mongoose = require('mongoose');
const { v4: uuidv4 } = require('uuid');
const { nanoid } = require('nanoid');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { agentSchema } = require('@librechat/data-schemas');
@@ -42,27 +41,7 @@ jest.mock('~/models/File', () => ({
deleteFileByFilter: jest.fn(),
}));
jest.mock('~/server/services/PermissionService', () => ({
findAccessibleResources: jest.fn().mockResolvedValue([]),
findPubliclyAccessibleResources: jest.fn().mockResolvedValue([]),
grantPermission: jest.fn(),
hasPublicPermission: jest.fn().mockResolvedValue(false),
}));
jest.mock('~/models', () => ({
getCategoriesWithCounts: jest.fn(),
}));
const {
createAgent: createAgentHandler,
updateAgent: updateAgentHandler,
getListAgents: getListAgentsHandler,
} = require('./v1');
const {
findAccessibleResources,
findPubliclyAccessibleResources,
} = require('~/server/services/PermissionService');
const { createAgent: createAgentHandler, updateAgent: updateAgentHandler } = require('./v1');
/**
* @type {import('mongoose').Model<import('@librechat/data-schemas').IAgent>}
@@ -100,7 +79,6 @@ describe('Agent Controllers - Mass Assignment Protection', () => {
},
body: {},
params: {},
query: {},
app: {
locals: {
fileStrategy: 'local',
@@ -257,81 +235,6 @@ describe('Agent Controllers - Mass Assignment Protection', () => {
expect(agentInDb.tool_resources.invalid_resource).toBeUndefined();
});
test('should handle support_contact with empty strings', async () => {
const dataWithEmptyContact = {
provider: 'openai',
model: 'gpt-4',
name: 'Agent with Empty Contact',
support_contact: {
name: '',
email: '',
},
};
mockReq.body = dataWithEmptyContact;
await createAgentHandler(mockReq, mockRes);
expect(mockRes.status).toHaveBeenCalledWith(201);
const createdAgent = mockRes.json.mock.calls[0][0];
expect(createdAgent.name).toBe('Agent with Empty Contact');
expect(createdAgent.support_contact).toBeDefined();
expect(createdAgent.support_contact.name).toBe('');
expect(createdAgent.support_contact.email).toBe('');
});
test('should handle support_contact with valid email', async () => {
const dataWithValidContact = {
provider: 'openai',
model: 'gpt-4',
name: 'Agent with Valid Contact',
support_contact: {
name: 'Support Team',
email: 'support@example.com',
},
};
mockReq.body = dataWithValidContact;
await createAgentHandler(mockReq, mockRes);
expect(mockRes.status).toHaveBeenCalledWith(201);
const createdAgent = mockRes.json.mock.calls[0][0];
expect(createdAgent.support_contact).toBeDefined();
expect(createdAgent.support_contact.name).toBe('Support Team');
expect(createdAgent.support_contact.email).toBe('support@example.com');
});
test('should reject support_contact with invalid email', async () => {
const dataWithInvalidEmail = {
provider: 'openai',
model: 'gpt-4',
name: 'Agent with Invalid Email',
support_contact: {
name: 'Support',
email: 'not-an-email',
},
};
mockReq.body = dataWithInvalidEmail;
await createAgentHandler(mockReq, mockRes);
expect(mockRes.status).toHaveBeenCalledWith(400);
expect(mockRes.json).toHaveBeenCalledWith(
expect.objectContaining({
error: 'Invalid request data',
details: expect.arrayContaining([
expect.objectContaining({
path: ['support_contact', 'email'],
}),
]),
}),
);
});
test('should handle avatar validation', async () => {
const dataWithAvatar = {
provider: 'openai',
@@ -469,6 +372,52 @@ describe('Agent Controllers - Mass Assignment Protection', () => {
expect(agentInDb.id).toBe(existingAgentId);
});
test('should reject update from non-author when not collaborative', async () => {
const differentUserId = new mongoose.Types.ObjectId().toString();
mockReq.user.id = differentUserId; // Different user
mockReq.params.id = existingAgentId;
mockReq.body = {
name: 'Unauthorized Update',
};
await updateAgentHandler(mockReq, mockRes);
expect(mockRes.status).toHaveBeenCalledWith(403);
expect(mockRes.json).toHaveBeenCalledWith({
error: 'You do not have permission to modify this non-collaborative agent',
});
// Verify agent was not modified in database
const agentInDb = await Agent.findOne({ id: existingAgentId });
expect(agentInDb.name).toBe('Original Agent');
});
test('should allow update from non-author when collaborative', async () => {
// First make the agent collaborative
await Agent.updateOne({ id: existingAgentId }, { isCollaborative: true });
const differentUserId = new mongoose.Types.ObjectId().toString();
mockReq.user.id = differentUserId; // Different user
mockReq.params.id = existingAgentId;
mockReq.body = {
name: 'Collaborative Update',
};
await updateAgentHandler(mockReq, mockRes);
expect(mockRes.status).not.toHaveBeenCalledWith(403);
expect(mockRes.json).toHaveBeenCalled();
const updatedAgent = mockRes.json.mock.calls[0][0];
expect(updatedAgent.name).toBe('Collaborative Update');
// Author field should be removed for non-author
expect(updatedAgent.author).toBeUndefined();
// Verify in database
const agentInDb = await Agent.findOne({ id: existingAgentId });
expect(agentInDb.name).toBe('Collaborative Update');
});
test('should allow admin to update any agent', async () => {
const adminUserId = new mongoose.Types.ObjectId().toString();
mockReq.user.id = adminUserId;
@@ -549,28 +498,6 @@ describe('Agent Controllers - Mass Assignment Protection', () => {
expect(mockRes.json).toHaveBeenCalledWith({ error: 'Agent not found' });
});
test('should include version field in update response', async () => {
mockReq.user.id = existingAgentAuthorId.toString();
mockReq.params.id = existingAgentId;
mockReq.body = {
name: 'Updated with Version Check',
};
await updateAgentHandler(mockReq, mockRes);
expect(mockRes.json).toHaveBeenCalled();
const updatedAgent = mockRes.json.mock.calls[0][0];
// Verify version field is included and is a number
expect(updatedAgent).toHaveProperty('version');
expect(typeof updatedAgent.version).toBe('number');
expect(updatedAgent.version).toBeGreaterThanOrEqual(1);
// Verify in database
const agentInDb = await Agent.findOne({ id: existingAgentId });
expect(updatedAgent.version).toBe(agentInDb.versions.length);
});
test('should handle validation errors properly', async () => {
mockReq.user.id = existingAgentAuthorId.toString();
mockReq.params.id = existingAgentId;
@@ -628,6 +555,45 @@ describe('Agent Controllers - Mass Assignment Protection', () => {
expect(agentInDb.__v).not.toBe(99);
});
test('should prevent privilege escalation through isCollaborative', async () => {
// Create a non-collaborative agent
const authorId = new mongoose.Types.ObjectId();
const agent = await Agent.create({
id: `agent_${uuidv4()}`,
name: 'Private Agent',
provider: 'openai',
model: 'gpt-4',
author: authorId,
isCollaborative: false,
versions: [
{
name: 'Private Agent',
provider: 'openai',
model: 'gpt-4',
createdAt: new Date(),
updatedAt: new Date(),
},
],
});
// Try to make it collaborative as a different user
const attackerId = new mongoose.Types.ObjectId().toString();
mockReq.user.id = attackerId;
mockReq.params.id = agent.id;
mockReq.body = {
isCollaborative: true, // Trying to escalate privileges
};
await updateAgentHandler(mockReq, mockRes);
// Should be rejected
expect(mockRes.status).toHaveBeenCalledWith(403);
// Verify in database that it's still not collaborative
const agentInDb = await Agent.findOne({ id: agent.id });
expect(agentInDb.isCollaborative).toBe(false);
});
test('should prevent author hijacking', async () => {
const originalAuthorId = new mongoose.Types.ObjectId();
const attackerId = new mongoose.Types.ObjectId();
@@ -690,373 +656,4 @@ describe('Agent Controllers - Mass Assignment Protection', () => {
expect(agentInDb.futureFeature).toBeUndefined();
});
});
describe('getListAgentsHandler - Security Tests', () => {
let userA, userB;
let agentA1, agentA2, agentA3, agentB1;
beforeEach(async () => {
await Agent.deleteMany({});
jest.clearAllMocks();
// Create two test users
userA = new mongoose.Types.ObjectId();
userB = new mongoose.Types.ObjectId();
// Create agents for User A
agentA1 = await Agent.create({
id: `agent_${nanoid(12)}`,
name: 'Agent A1',
description: 'User A agent 1',
provider: 'openai',
model: 'gpt-4',
author: userA,
versions: [
{
name: 'Agent A1',
description: 'User A agent 1',
provider: 'openai',
model: 'gpt-4',
createdAt: new Date(),
updatedAt: new Date(),
},
],
});
agentA2 = await Agent.create({
id: `agent_${nanoid(12)}`,
name: 'Agent A2',
description: 'User A agent 2',
provider: 'openai',
model: 'gpt-4',
author: userA,
versions: [
{
name: 'Agent A2',
description: 'User A agent 2',
provider: 'openai',
model: 'gpt-4',
createdAt: new Date(),
updatedAt: new Date(),
},
],
});
agentA3 = await Agent.create({
id: `agent_${nanoid(12)}`,
name: 'Agent A3',
description: 'User A agent 3',
provider: 'openai',
model: 'gpt-4',
author: userA,
category: 'productivity',
versions: [
{
name: 'Agent A3',
description: 'User A agent 3',
provider: 'openai',
model: 'gpt-4',
category: 'productivity',
createdAt: new Date(),
updatedAt: new Date(),
},
],
});
// Create an agent for User B
agentB1 = await Agent.create({
id: `agent_${nanoid(12)}`,
name: 'Agent B1',
description: 'User B agent 1',
provider: 'openai',
model: 'gpt-4',
author: userB,
versions: [
{
name: 'Agent B1',
description: 'User B agent 1',
provider: 'openai',
model: 'gpt-4',
createdAt: new Date(),
updatedAt: new Date(),
},
],
});
});
test('should return empty list when user has no accessible agents', async () => {
// User B has no permissions and no owned agents
mockReq.user.id = userB.toString();
findAccessibleResources.mockResolvedValue([]);
findPubliclyAccessibleResources.mockResolvedValue([]);
await getListAgentsHandler(mockReq, mockRes);
expect(findAccessibleResources).toHaveBeenCalledWith({
userId: userB.toString(),
role: 'USER',
resourceType: 'agent',
requiredPermissions: 1, // VIEW permission
});
expect(mockRes.json).toHaveBeenCalledWith({
object: 'list',
data: [],
first_id: null,
last_id: null,
has_more: false,
after: null,
});
});
test('should not return other users agents when accessibleIds is empty', async () => {
// User B trying to see agents with no permissions
mockReq.user.id = userB.toString();
findAccessibleResources.mockResolvedValue([]);
findPubliclyAccessibleResources.mockResolvedValue([]);
await getListAgentsHandler(mockReq, mockRes);
const response = mockRes.json.mock.calls[0][0];
expect(response.data).toHaveLength(0);
// Verify User A's agents are not included
const agentIds = response.data.map((a) => a.id);
expect(agentIds).not.toContain(agentA1.id);
expect(agentIds).not.toContain(agentA2.id);
expect(agentIds).not.toContain(agentA3.id);
});
test('should only return agents user has access to', async () => {
// User B has access to one of User A's agents
mockReq.user.id = userB.toString();
findAccessibleResources.mockResolvedValue([agentA1._id]);
findPubliclyAccessibleResources.mockResolvedValue([]);
await getListAgentsHandler(mockReq, mockRes);
const response = mockRes.json.mock.calls[0][0];
expect(response.data).toHaveLength(1);
expect(response.data[0].id).toBe(agentA1.id);
expect(response.data[0].name).toBe('Agent A1');
});
test('should return multiple accessible agents', async () => {
// User B has access to multiple agents
mockReq.user.id = userB.toString();
findAccessibleResources.mockResolvedValue([agentA1._id, agentA3._id, agentB1._id]);
findPubliclyAccessibleResources.mockResolvedValue([]);
await getListAgentsHandler(mockReq, mockRes);
const response = mockRes.json.mock.calls[0][0];
expect(response.data).toHaveLength(3);
const agentIds = response.data.map((a) => a.id);
expect(agentIds).toContain(agentA1.id);
expect(agentIds).toContain(agentA3.id);
expect(agentIds).toContain(agentB1.id);
expect(agentIds).not.toContain(agentA2.id);
});
test('should apply category filter correctly with ACL', async () => {
// User has access to all agents but filters by category
mockReq.user.id = userB.toString();
mockReq.query.category = 'productivity';
findAccessibleResources.mockResolvedValue([agentA1._id, agentA2._id, agentA3._id]);
findPubliclyAccessibleResources.mockResolvedValue([]);
await getListAgentsHandler(mockReq, mockRes);
const response = mockRes.json.mock.calls[0][0];
expect(response.data).toHaveLength(1);
expect(response.data[0].id).toBe(agentA3.id);
expect(response.data[0].category).toBe('productivity');
});
test('should apply search filter correctly with ACL', async () => {
// User has access to multiple agents but searches for specific one
mockReq.user.id = userB.toString();
mockReq.query.search = 'A2';
findAccessibleResources.mockResolvedValue([agentA1._id, agentA2._id, agentA3._id]);
findPubliclyAccessibleResources.mockResolvedValue([]);
await getListAgentsHandler(mockReq, mockRes);
const response = mockRes.json.mock.calls[0][0];
expect(response.data).toHaveLength(1);
expect(response.data[0].id).toBe(agentA2.id);
});
test('should handle pagination with ACL filtering', async () => {
// Create more agents for pagination testing
const moreAgents = [];
for (let i = 4; i <= 10; i++) {
const agent = await Agent.create({
id: `agent_${nanoid(12)}`,
name: `Agent A${i}`,
description: `User A agent ${i}`,
provider: 'openai',
model: 'gpt-4',
author: userA,
versions: [
{
name: `Agent A${i}`,
description: `User A agent ${i}`,
provider: 'openai',
model: 'gpt-4',
createdAt: new Date(),
updatedAt: new Date(),
},
],
});
moreAgents.push(agent);
}
// User has access to all agents
const allAgentIds = [agentA1, agentA2, agentA3, ...moreAgents].map((a) => a._id);
mockReq.user.id = userB.toString();
mockReq.query.limit = '5';
findAccessibleResources.mockResolvedValue(allAgentIds);
findPubliclyAccessibleResources.mockResolvedValue([]);
await getListAgentsHandler(mockReq, mockRes);
const response = mockRes.json.mock.calls[0][0];
expect(response.data).toHaveLength(5);
expect(response.has_more).toBe(true);
expect(response.after).toBeTruthy();
});
test('should mark publicly accessible agents', async () => {
// User has access to agents, some are public
mockReq.user.id = userB.toString();
findAccessibleResources.mockResolvedValue([agentA1._id, agentA2._id]);
findPubliclyAccessibleResources.mockResolvedValue([agentA2._id]);
await getListAgentsHandler(mockReq, mockRes);
const response = mockRes.json.mock.calls[0][0];
expect(response.data).toHaveLength(2);
const publicAgent = response.data.find((a) => a.id === agentA2.id);
const privateAgent = response.data.find((a) => a.id === agentA1.id);
expect(publicAgent.isPublic).toBe(true);
expect(privateAgent.isPublic).toBeUndefined();
});
test('should handle requiredPermission parameter', async () => {
// Test with different permission levels
mockReq.user.id = userB.toString();
mockReq.query.requiredPermission = '15'; // FULL_ACCESS
findAccessibleResources.mockResolvedValue([agentA1._id]);
findPubliclyAccessibleResources.mockResolvedValue([]);
await getListAgentsHandler(mockReq, mockRes);
expect(findAccessibleResources).toHaveBeenCalledWith({
userId: userB.toString(),
role: 'USER',
resourceType: 'agent',
requiredPermissions: 15,
});
const response = mockRes.json.mock.calls[0][0];
expect(response.data).toHaveLength(1);
});
test('should handle promoted filter with ACL', async () => {
// Create a promoted agent
const promotedAgent = await Agent.create({
id: `agent_${nanoid(12)}`,
name: 'Promoted Agent',
description: 'A promoted agent',
provider: 'openai',
model: 'gpt-4',
author: userA,
is_promoted: true,
versions: [
{
name: 'Promoted Agent',
description: 'A promoted agent',
provider: 'openai',
model: 'gpt-4',
is_promoted: true,
createdAt: new Date(),
updatedAt: new Date(),
},
],
});
mockReq.user.id = userB.toString();
mockReq.query.promoted = '1';
findAccessibleResources.mockResolvedValue([agentA1._id, agentA2._id, promotedAgent._id]);
findPubliclyAccessibleResources.mockResolvedValue([]);
await getListAgentsHandler(mockReq, mockRes);
const response = mockRes.json.mock.calls[0][0];
expect(response.data).toHaveLength(1);
expect(response.data[0].id).toBe(promotedAgent.id);
expect(response.data[0].is_promoted).toBe(true);
});
test('should handle errors gracefully', async () => {
mockReq.user.id = userB.toString();
findAccessibleResources.mockRejectedValue(new Error('Permission service error'));
await getListAgentsHandler(mockReq, mockRes);
expect(mockRes.status).toHaveBeenCalledWith(500);
expect(mockRes.json).toHaveBeenCalledWith({
error: 'Permission service error',
});
});
test('should respect combined filters with ACL', async () => {
// Create agents with specific attributes
const productivityPromoted = await Agent.create({
id: `agent_${nanoid(12)}`,
name: 'Productivity Pro',
description: 'A promoted productivity agent',
provider: 'openai',
model: 'gpt-4',
author: userA,
category: 'productivity',
is_promoted: true,
versions: [
{
name: 'Productivity Pro',
description: 'A promoted productivity agent',
provider: 'openai',
model: 'gpt-4',
category: 'productivity',
is_promoted: true,
createdAt: new Date(),
updatedAt: new Date(),
},
],
});
mockReq.user.id = userB.toString();
mockReq.query.category = 'productivity';
mockReq.query.promoted = '1';
findAccessibleResources.mockResolvedValue([
agentA1._id,
agentA2._id,
agentA3._id,
productivityPromoted._id,
]);
findPubliclyAccessibleResources.mockResolvedValue([]);
await getListAgentsHandler(mockReq, mockRes);
const response = mockRes.json.mock.calls[0][0];
expect(response.data).toHaveLength(1);
expect(response.data[0].id).toBe(productivityPromoted.id);
expect(response.data[0].category).toBe('productivity');
expect(response.data[0].is_promoted).toBe(true);
});
});
});

View File

@@ -8,12 +8,14 @@ const express = require('express');
const passport = require('passport');
const compression = require('compression');
const cookieParser = require('cookie-parser');
const { isEnabled } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const mongoSanitize = require('express-mongo-sanitize');
const { isEnabled, ErrorController } = require('@librechat/api');
const { connectDb, indexSync } = require('~/db');
const validateImageRequest = require('./middleware/validateImageRequest');
const { jwtLogin, ldapLogin, passportLogin } = require('~/strategies');
const errorController = require('./controllers/ErrorController');
const initializeMCPs = require('./services/initializeMCPs');
const configureSocialLogins = require('./socialLogins');
const AppService = require('./services/AppService');
@@ -115,12 +117,11 @@ const startServer = async () => {
app.use('/api/agents', routes.agents);
app.use('/api/banner', routes.banner);
app.use('/api/memories', routes.memories);
app.use('/api/permissions', routes.accessPermissions);
app.use('/api/tags', routes.tags);
app.use('/api/mcp', routes.mcp);
app.use(ErrorController);
// Add the error controller one more time after all routes
app.use(errorController);
app.use((req, res) => {
res.set({

View File

@@ -92,7 +92,7 @@ async function healthCheckPoll(app, retries = 0) {
if (response.status === 200) {
return; // App is healthy
}
} catch {
} catch (error) {
// Ignore connection errors during polling
}

View File

@@ -1,97 +0,0 @@
const { logger } = require('@librechat/data-schemas');
const { Constants, isAgentsEndpoint, ResourceType } = require('librechat-data-provider');
const { canAccessResource } = require('./canAccessResource');
const { getAgent } = require('~/models/Agent');
/**
* Agent ID resolver function for agent_id from request body
* Resolves custom agent ID (e.g., "agent_abc123") to MongoDB ObjectId
* This is used specifically for chat routes where agent_id comes from request body
*
* @param {string} agentCustomId - Custom agent ID from request body
* @returns {Promise<Object|null>} Agent document with _id field, or null if not found
*/
const resolveAgentIdFromBody = async (agentCustomId) => {
// Handle ephemeral agents - they don't need permission checks
if (agentCustomId === Constants.EPHEMERAL_AGENT_ID) {
return null; // No permission check needed for ephemeral agents
}
return await getAgent({ id: agentCustomId });
};
/**
* Middleware factory that creates middleware to check agent access permissions from request body.
* This middleware is specifically designed for chat routes where the agent_id comes from req.body
* instead of route parameters.
*
* @param {Object} options - Configuration options
* @param {number} options.requiredPermission - The permission bit required (1=view, 2=edit, 4=delete, 8=share)
* @returns {Function} Express middleware function
*
* @example
* // Basic usage for agent chat (requires VIEW permission)
* router.post('/chat',
* canAccessAgentFromBody({ requiredPermission: PermissionBits.VIEW }),
* buildEndpointOption,
* chatController
* );
*/
const canAccessAgentFromBody = (options) => {
const { requiredPermission } = options;
// Validate required options
if (!requiredPermission || typeof requiredPermission !== 'number') {
throw new Error('canAccessAgentFromBody: requiredPermission is required and must be a number');
}
return async (req, res, next) => {
try {
const { endpoint, agent_id } = req.body;
let agentId = agent_id;
if (!isAgentsEndpoint(endpoint)) {
agentId = Constants.EPHEMERAL_AGENT_ID;
}
if (!agentId) {
return res.status(400).json({
error: 'Bad Request',
message: 'agent_id is required in request body',
});
}
// Skip permission checks for ephemeral agents
if (agentId === Constants.EPHEMERAL_AGENT_ID) {
return next();
}
const agentAccessMiddleware = canAccessResource({
resourceType: ResourceType.AGENT,
requiredPermission,
resourceIdParam: 'agent_id', // This will be ignored since we use custom resolver
idResolver: () => resolveAgentIdFromBody(agentId),
});
const tempReq = {
...req,
params: {
...req.params,
agent_id: agentId,
},
};
return agentAccessMiddleware(tempReq, res, next);
} catch (error) {
logger.error('Failed to validate agent access permissions', error);
return res.status(500).json({
error: 'Internal Server Error',
message: 'Failed to validate agent access permissions',
});
}
};
};
module.exports = {
canAccessAgentFromBody,
};

View File

@@ -1,59 +0,0 @@
const { ResourceType } = require('librechat-data-provider');
const { canAccessResource } = require('./canAccessResource');
const { getAgent } = require('~/models/Agent');
/**
* Agent ID resolver function
* Resolves custom agent ID (e.g., "agent_abc123") to MongoDB ObjectId
*
* @param {string} agentCustomId - Custom agent ID from route parameter
* @returns {Promise<Object|null>} Agent document with _id field, or null if not found
*/
const resolveAgentId = async (agentCustomId) => {
return await getAgent({ id: agentCustomId });
};
/**
* Agent-specific middleware factory that creates middleware to check agent access permissions.
* This middleware extends the generic canAccessResource to handle agent custom ID resolution.
*
* @param {Object} options - Configuration options
* @param {number} options.requiredPermission - The permission bit required (1=view, 2=edit, 4=delete, 8=share)
* @param {string} [options.resourceIdParam='id'] - The name of the route parameter containing the agent custom ID
* @returns {Function} Express middleware function
*
* @example
* // Basic usage for viewing agents
* router.get('/agents/:id',
* canAccessAgentResource({ requiredPermission: 1 }),
* getAgent
* );
*
* @example
* // Custom resource ID parameter and edit permission
* router.patch('/agents/:agent_id',
* canAccessAgentResource({
* requiredPermission: 2,
* resourceIdParam: 'agent_id'
* }),
* updateAgent
* );
*/
const canAccessAgentResource = (options) => {
const { requiredPermission, resourceIdParam = 'id' } = options;
if (!requiredPermission || typeof requiredPermission !== 'number') {
throw new Error('canAccessAgentResource: requiredPermission is required and must be a number');
}
return canAccessResource({
resourceType: ResourceType.AGENT,
requiredPermission,
resourceIdParam,
idResolver: resolveAgentId,
});
};
module.exports = {
canAccessAgentResource,
};

View File

@@ -1,385 +0,0 @@
const mongoose = require('mongoose');
const { ResourceType, PrincipalType, PrincipalModel } = require('librechat-data-provider');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { canAccessAgentResource } = require('./canAccessAgentResource');
const { User, Role, AclEntry } = require('~/db/models');
const { createAgent } = require('~/models/Agent');
describe('canAccessAgentResource middleware', () => {
let mongoServer;
let req, res, next;
let testUser;
beforeAll(async () => {
mongoServer = await MongoMemoryServer.create();
const mongoUri = mongoServer.getUri();
await mongoose.connect(mongoUri);
});
afterAll(async () => {
await mongoose.disconnect();
await mongoServer.stop();
});
beforeEach(async () => {
await mongoose.connection.dropDatabase();
await Role.create({
name: 'test-role',
permissions: {
AGENTS: {
USE: true,
CREATE: true,
SHARED_GLOBAL: false,
},
},
});
// Create a test user
testUser = await User.create({
email: 'test@example.com',
name: 'Test User',
username: 'testuser',
role: 'test-role',
});
req = {
user: { id: testUser._id, role: testUser.role },
params: {},
};
res = {
status: jest.fn().mockReturnThis(),
json: jest.fn(),
};
next = jest.fn();
jest.clearAllMocks();
});
describe('middleware factory', () => {
test('should throw error if requiredPermission is not provided', () => {
expect(() => canAccessAgentResource({})).toThrow(
'canAccessAgentResource: requiredPermission is required and must be a number',
);
});
test('should throw error if requiredPermission is not a number', () => {
expect(() => canAccessAgentResource({ requiredPermission: '1' })).toThrow(
'canAccessAgentResource: requiredPermission is required and must be a number',
);
});
test('should create middleware with default resourceIdParam', () => {
const middleware = canAccessAgentResource({ requiredPermission: 1 });
expect(typeof middleware).toBe('function');
expect(middleware.length).toBe(3); // Express middleware signature
});
test('should create middleware with custom resourceIdParam', () => {
const middleware = canAccessAgentResource({
requiredPermission: 2,
resourceIdParam: 'agent_id',
});
expect(typeof middleware).toBe('function');
expect(middleware.length).toBe(3);
});
});
describe('permission checking with real agents', () => {
test('should allow access when user is the agent author', async () => {
// Create an agent owned by the test user
const agent = await createAgent({
id: `agent_${Date.now()}`,
name: 'Test Agent',
provider: 'openai',
model: 'gpt-4',
author: testUser._id,
});
// Create ACL entry for the author (owner permissions)
await AclEntry.create({
principalType: PrincipalType.USER,
principalId: testUser._id,
principalModel: PrincipalModel.USER,
resourceType: ResourceType.AGENT,
resourceId: agent._id,
permBits: 15, // All permissions (1+2+4+8)
grantedBy: testUser._id,
});
req.params.id = agent.id;
const middleware = canAccessAgentResource({ requiredPermission: 1 }); // VIEW permission
await middleware(req, res, next);
expect(next).toHaveBeenCalled();
expect(res.status).not.toHaveBeenCalled();
});
test('should deny access when user is not the author and has no ACL entry', async () => {
// Create an agent owned by a different user
const otherUser = await User.create({
email: 'other@example.com',
name: 'Other User',
username: 'otheruser',
role: 'test-role',
});
const agent = await createAgent({
id: `agent_${Date.now()}`,
name: 'Other User Agent',
provider: 'openai',
model: 'gpt-4',
author: otherUser._id,
});
// Create ACL entry for the other user (owner)
await AclEntry.create({
principalType: PrincipalType.USER,
principalId: otherUser._id,
principalModel: PrincipalModel.USER,
resourceType: ResourceType.AGENT,
resourceId: agent._id,
permBits: 15, // All permissions
grantedBy: otherUser._id,
});
req.params.id = agent.id;
const middleware = canAccessAgentResource({ requiredPermission: 1 }); // VIEW permission
await middleware(req, res, next);
expect(next).not.toHaveBeenCalled();
expect(res.status).toHaveBeenCalledWith(403);
expect(res.json).toHaveBeenCalledWith({
error: 'Forbidden',
message: 'Insufficient permissions to access this agent',
});
});
test('should allow access when user has ACL entry with sufficient permissions', async () => {
// Create an agent owned by a different user
const otherUser = await User.create({
email: 'other2@example.com',
name: 'Other User 2',
username: 'otheruser2',
role: 'test-role',
});
const agent = await createAgent({
id: `agent_${Date.now()}`,
name: 'Shared Agent',
provider: 'openai',
model: 'gpt-4',
author: otherUser._id,
});
// Create ACL entry granting view permission to test user
await AclEntry.create({
principalType: PrincipalType.USER,
principalId: testUser._id,
principalModel: PrincipalModel.USER,
resourceType: ResourceType.AGENT,
resourceId: agent._id,
permBits: 1, // VIEW permission
grantedBy: otherUser._id,
});
req.params.id = agent.id;
const middleware = canAccessAgentResource({ requiredPermission: 1 }); // VIEW permission
await middleware(req, res, next);
expect(next).toHaveBeenCalled();
expect(res.status).not.toHaveBeenCalled();
});
test('should deny access when ACL permissions are insufficient', async () => {
// Create an agent owned by a different user
const otherUser = await User.create({
email: 'other3@example.com',
name: 'Other User 3',
username: 'otheruser3',
role: 'test-role',
});
const agent = await createAgent({
id: `agent_${Date.now()}`,
name: 'Limited Access Agent',
provider: 'openai',
model: 'gpt-4',
author: otherUser._id,
});
// Create ACL entry granting only view permission
await AclEntry.create({
principalType: PrincipalType.USER,
principalId: testUser._id,
principalModel: PrincipalModel.USER,
resourceType: ResourceType.AGENT,
resourceId: agent._id,
permBits: 1, // VIEW permission only
grantedBy: otherUser._id,
});
req.params.id = agent.id;
const middleware = canAccessAgentResource({ requiredPermission: 2 }); // EDIT permission required
await middleware(req, res, next);
expect(next).not.toHaveBeenCalled();
expect(res.status).toHaveBeenCalledWith(403);
expect(res.json).toHaveBeenCalledWith({
error: 'Forbidden',
message: 'Insufficient permissions to access this agent',
});
});
test('should handle non-existent agent', async () => {
req.params.id = 'agent_nonexistent';
const middleware = canAccessAgentResource({ requiredPermission: 1 });
await middleware(req, res, next);
expect(next).not.toHaveBeenCalled();
expect(res.status).toHaveBeenCalledWith(404);
expect(res.json).toHaveBeenCalledWith({
error: 'Not Found',
message: 'agent not found',
});
});
test('should use custom resourceIdParam', async () => {
const agent = await createAgent({
id: `agent_${Date.now()}`,
name: 'Custom Param Agent',
provider: 'openai',
model: 'gpt-4',
author: testUser._id,
});
// Create ACL entry for the author
await AclEntry.create({
principalType: PrincipalType.USER,
principalId: testUser._id,
principalModel: PrincipalModel.USER,
resourceType: ResourceType.AGENT,
resourceId: agent._id,
permBits: 15, // All permissions
grantedBy: testUser._id,
});
req.params.agent_id = agent.id; // Using custom param name
const middleware = canAccessAgentResource({
requiredPermission: 1,
resourceIdParam: 'agent_id',
});
await middleware(req, res, next);
expect(next).toHaveBeenCalled();
expect(res.status).not.toHaveBeenCalled();
});
});
describe('permission levels', () => {
let agent;
beforeEach(async () => {
agent = await createAgent({
id: `agent_${Date.now()}`,
name: 'Permission Test Agent',
provider: 'openai',
model: 'gpt-4',
author: testUser._id,
});
// Create ACL entry with all permissions for the owner
await AclEntry.create({
principalType: PrincipalType.USER,
principalId: testUser._id,
principalModel: PrincipalModel.USER,
resourceType: ResourceType.AGENT,
resourceId: agent._id,
permBits: 15, // All permissions (1+2+4+8)
grantedBy: testUser._id,
});
req.params.id = agent.id;
});
test('should support view permission (1)', async () => {
const middleware = canAccessAgentResource({ requiredPermission: 1 });
await middleware(req, res, next);
expect(next).toHaveBeenCalled();
});
test('should support edit permission (2)', async () => {
const middleware = canAccessAgentResource({ requiredPermission: 2 });
await middleware(req, res, next);
expect(next).toHaveBeenCalled();
});
test('should support delete permission (4)', async () => {
const middleware = canAccessAgentResource({ requiredPermission: 4 });
await middleware(req, res, next);
expect(next).toHaveBeenCalled();
});
test('should support share permission (8)', async () => {
const middleware = canAccessAgentResource({ requiredPermission: 8 });
await middleware(req, res, next);
expect(next).toHaveBeenCalled();
});
test('should support combined permissions', async () => {
const viewAndEdit = 1 | 2; // 3
const middleware = canAccessAgentResource({ requiredPermission: viewAndEdit });
await middleware(req, res, next);
expect(next).toHaveBeenCalled();
});
});
describe('integration with agent operations', () => {
test('should work with agent CRUD operations', async () => {
const agentId = `agent_${Date.now()}`;
// Create agent
const agent = await createAgent({
id: agentId,
name: 'Integration Test Agent',
provider: 'openai',
model: 'gpt-4',
author: testUser._id,
description: 'Testing integration',
});
// Create ACL entry for the author
await AclEntry.create({
principalType: PrincipalType.USER,
principalId: testUser._id,
principalModel: PrincipalModel.USER,
resourceType: ResourceType.AGENT,
resourceId: agent._id,
permBits: 15, // All permissions
grantedBy: testUser._id,
});
req.params.id = agentId;
// Test view access
const viewMiddleware = canAccessAgentResource({ requiredPermission: 1 });
await viewMiddleware(req, res, next);
expect(next).toHaveBeenCalled();
jest.clearAllMocks();
// Update the agent
const { updateAgent } = require('~/models/Agent');
await updateAgent({ id: agentId }, { description: 'Updated description' });
// Test edit access
const editMiddleware = canAccessAgentResource({ requiredPermission: 2 });
await editMiddleware(req, res, next);
expect(next).toHaveBeenCalled();
});
});
});

View File

@@ -1,61 +0,0 @@
const { ResourceType } = require('librechat-data-provider');
const { canAccessResource } = require('./canAccessResource');
const { getPromptGroup } = require('~/models/Prompt');
/**
* PromptGroup ID resolver function
* Resolves promptGroup ID to MongoDB ObjectId
*
* @param {string} groupId - PromptGroup ID from route parameter
* @returns {Promise<Object|null>} PromptGroup document with _id field, or null if not found
*/
const resolvePromptGroupId = async (groupId) => {
return await getPromptGroup({ _id: groupId });
};
/**
* PromptGroup-specific middleware factory that creates middleware to check promptGroup access permissions.
* This middleware extends the generic canAccessResource to handle promptGroup ID resolution.
*
* @param {Object} options - Configuration options
* @param {number} options.requiredPermission - The permission bit required (1=view, 2=edit, 4=delete, 8=share)
* @param {string} [options.resourceIdParam='groupId'] - The name of the route parameter containing the promptGroup ID
* @returns {Function} Express middleware function
*
* @example
* // Basic usage for viewing promptGroups
* router.get('/prompts/groups/:groupId',
* canAccessPromptGroupResource({ requiredPermission: 1 }),
* getPromptGroup
* );
*
* @example
* // Custom resource ID parameter and edit permission
* router.patch('/prompts/groups/:id',
* canAccessPromptGroupResource({
* requiredPermission: 2,
* resourceIdParam: 'id'
* }),
* updatePromptGroup
* );
*/
const canAccessPromptGroupResource = (options) => {
const { requiredPermission, resourceIdParam = 'groupId' } = options;
if (!requiredPermission || typeof requiredPermission !== 'number') {
throw new Error(
'canAccessPromptGroupResource: requiredPermission is required and must be a number',
);
}
return canAccessResource({
resourceType: ResourceType.PROMPTGROUP,
requiredPermission,
resourceIdParam,
idResolver: resolvePromptGroupId,
});
};
module.exports = {
canAccessPromptGroupResource,
};

View File

@@ -1,55 +0,0 @@
const { ResourceType } = require('librechat-data-provider');
const { canAccessResource } = require('./canAccessResource');
const { getPrompt } = require('~/models/Prompt');
/**
* Prompt to PromptGroup ID resolver function
* Resolves prompt ID to its parent promptGroup ID
*
* @param {string} promptId - Prompt ID from route parameter
* @returns {Promise<Object|null>} Object with promptGroup's _id field, or null if not found
*/
const resolvePromptToGroupId = async (promptId) => {
const prompt = await getPrompt({ _id: promptId });
if (!prompt || !prompt.groupId) {
return null;
}
// Return an object with _id that matches the promptGroup ID
return { _id: prompt.groupId };
};
/**
* Middleware factory that checks promptGroup permissions when accessing individual prompts.
* This allows permission management at the promptGroup level while still supporting
* individual prompt access patterns.
*
* @param {Object} options - Configuration options
* @param {number} options.requiredPermission - The permission bit required (1=view, 2=edit, 4=delete, 8=share)
* @param {string} [options.resourceIdParam='promptId'] - The name of the route parameter containing the prompt ID
* @returns {Function} Express middleware function
*
* @example
* // Check promptGroup permissions when viewing a prompt
* router.get('/prompts/:promptId',
* canAccessPromptViaGroup({ requiredPermission: 1 }),
* getPrompt
* );
*/
const canAccessPromptViaGroup = (options) => {
const { requiredPermission, resourceIdParam = 'promptId' } = options;
if (!requiredPermission || typeof requiredPermission !== 'number') {
throw new Error('canAccessPromptViaGroup: requiredPermission is required and must be a number');
}
return canAccessResource({
resourceType: ResourceType.PROMPTGROUP,
requiredPermission,
resourceIdParam,
idResolver: resolvePromptToGroupId,
});
};
module.exports = {
canAccessPromptViaGroup,
};

View File

@@ -1,158 +0,0 @@
const { logger } = require('@librechat/data-schemas');
const { SystemRoles } = require('librechat-data-provider');
const { checkPermission } = require('~/server/services/PermissionService');
/**
* Generic base middleware factory that creates middleware to check resource access permissions.
* This middleware expects MongoDB ObjectIds as resource identifiers for ACL permission checks.
*
* @param {Object} options - Configuration options
* @param {string} options.resourceType - The type of resource (e.g., 'agent', 'file', 'project')
* @param {number} options.requiredPermission - The permission bit required (1=view, 2=edit, 4=delete, 8=share)
* @param {string} [options.resourceIdParam='resourceId'] - The name of the route parameter containing the resource ID
* @param {Function} [options.idResolver] - Optional function to resolve custom IDs to ObjectIds
* @returns {Function} Express middleware function
*
* @example
* // Direct usage with ObjectId (for resources that use MongoDB ObjectId in routes)
* router.get('/prompts/:promptId',
* canAccessResource({ resourceType: 'prompt', requiredPermission: 1 }),
* getPrompt
* );
*
* @example
* // Usage with custom ID resolver (for resources that use custom string IDs)
* router.get('/agents/:id',
* canAccessResource({
* resourceType: 'agent',
* requiredPermission: 1,
* resourceIdParam: 'id',
* idResolver: (customId) => resolveAgentId(customId)
* }),
* getAgent
* );
*/
const canAccessResource = (options) => {
const {
resourceType,
requiredPermission,
resourceIdParam = 'resourceId',
idResolver = null,
} = options;
if (!resourceType || typeof resourceType !== 'string') {
throw new Error('canAccessResource: resourceType is required and must be a string');
}
if (!requiredPermission || typeof requiredPermission !== 'number') {
throw new Error('canAccessResource: requiredPermission is required and must be a number');
}
return async (req, res, next) => {
try {
// Extract resource ID from route parameters
const rawResourceId = req.params[resourceIdParam];
if (!rawResourceId) {
logger.warn(`[canAccessResource] Missing ${resourceIdParam} in route parameters`);
return res.status(400).json({
error: 'Bad Request',
message: `${resourceIdParam} is required`,
});
}
// Check if user is authenticated
if (!req.user || !req.user.id) {
logger.warn(
`[canAccessResource] Unauthenticated request for ${resourceType} ${rawResourceId}`,
);
return res.status(401).json({
error: 'Unauthorized',
message: 'Authentication required',
});
}
// If the user is a system admin, allow the request through without a resource-level check
if (req.user.role === SystemRoles.ADMIN) {
return next();
}
const userId = req.user.id;
let resourceId = rawResourceId;
let resourceInfo = null;
// Resolve custom ID to ObjectId if resolver is provided
if (idResolver) {
logger.debug(
`[canAccessResource] Resolving ${resourceType} custom ID ${rawResourceId} to ObjectId`,
);
const resolutionResult = await idResolver(rawResourceId);
if (!resolutionResult) {
logger.warn(`[canAccessResource] ${resourceType} not found: ${rawResourceId}`);
return res.status(404).json({
error: 'Not Found',
message: `${resourceType} not found`,
});
}
// Handle different resolver return formats
if (typeof resolutionResult === 'string' || resolutionResult._id) {
resourceId = resolutionResult._id || resolutionResult;
resourceInfo = typeof resolutionResult === 'object' ? resolutionResult : null;
} else {
resourceId = resolutionResult;
}
logger.debug(
`[canAccessResource] Resolved ${resourceType} ${rawResourceId} to ObjectId ${resourceId}`,
);
}
// Check permissions using PermissionService with ObjectId
const hasPermission = await checkPermission({
userId,
role: req.user.role,
resourceType,
resourceId,
requiredPermission,
});
if (hasPermission) {
logger.debug(
`[canAccessResource] User ${userId} has permission ${requiredPermission} on ${resourceType} ${rawResourceId} (${resourceId})`,
);
req.resourceAccess = {
resourceType,
resourceId, // MongoDB ObjectId for ACL operations
customResourceId: rawResourceId, // Original ID from route params
permission: requiredPermission,
userId,
...(resourceInfo && { resourceInfo }),
};
return next();
}
logger.warn(
`[canAccessResource] User ${userId} denied access to ${resourceType} ${rawResourceId} ` +
`(required permission: ${requiredPermission})`,
);
return res.status(403).json({
error: 'Forbidden',
message: `Insufficient permissions to access this ${resourceType}`,
});
} catch (error) {
logger.error(`[canAccessResource] Error checking access for ${resourceType}:`, error);
return res.status(500).json({
error: 'Internal Server Error',
message: 'Failed to check resource access permissions',
});
}
};
};
module.exports = {
canAccessResource,
};
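The permission values used throughout this middleware and the tests above (1 = view, 2 = edit, 4 = delete, 8 = share, 15 = all) are bit flags: they combine with bitwise OR and are tested with bitwise AND. A minimal sketch of that check, assuming checkPermission ultimately compares the required bits against a granted permBits mask like the AclEntry documents seen in the tests:

// Hypothetical helper for illustration; the real check lives in PermissionService.
const hasRequiredBits = (grantedBits, requiredBits) => (grantedBits & requiredBits) === requiredBits;

hasRequiredBits(15, 1);     // true: an owner mask (1+2+4+8) satisfies VIEW
hasRequiredBits(1, 2);      // false: a VIEW-only grant does not satisfy EDIT
hasRequiredBits(1 | 2, 3);  // true: combined requirements pass only if every bit is granted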

View File

@@ -1,125 +0,0 @@
const { logger } = require('@librechat/data-schemas');
const { PermissionBits, hasPermissions, ResourceType } = require('librechat-data-provider');
const { getEffectivePermissions } = require('~/server/services/PermissionService');
const { getAgent } = require('~/models/Agent');
const { getFiles } = require('~/models/File');
/**
* Checks if user has access to a file through agent permissions
* Files inherit permissions from agents - if you can view the agent, you can access its files
*/
const checkAgentBasedFileAccess = async ({ userId, role, fileId }) => {
try {
// Find agents that have this file in their tool_resources
const agentsWithFile = await getAgent({
$or: [
{ 'tool_resources.file_search.file_ids': fileId },
{ 'tool_resources.execute_code.file_ids': fileId },
{ 'tool_resources.ocr.file_ids': fileId },
],
});
if (!agentsWithFile || agentsWithFile.length === 0) {
return false;
}
// Check if user has access to any of these agents
for (const agent of Array.isArray(agentsWithFile) ? agentsWithFile : [agentsWithFile]) {
// Check if user is the agent author
if (agent.author && agent.author.toString() === userId) {
logger.debug(`[fileAccess] User is author of agent ${agent.id}`);
return true;
}
// Check ACL permissions for VIEW access on the agent
try {
const permissions = await getEffectivePermissions({
userId,
role,
resourceType: ResourceType.AGENT,
resourceId: agent._id || agent.id,
});
if (hasPermissions(permissions, PermissionBits.VIEW)) {
logger.debug(`[fileAccess] User ${userId} has VIEW permissions on agent ${agent.id}`);
return true;
}
} catch (permissionError) {
logger.warn(
`[fileAccess] Permission check failed for agent ${agent.id}:`,
permissionError.message,
);
// Continue checking other agents
}
}
return false;
} catch (error) {
logger.error('[fileAccess] Error checking agent-based access:', error);
return false;
}
};
/**
* Middleware to check if user can access a file
* Checks: 1) File ownership, 2) Agent-based access (file inherits agent permissions)
*/
const fileAccess = async (req, res, next) => {
try {
const fileId = req.params.file_id;
const userId = req.user?.id;
const userRole = req.user?.role;
if (!fileId) {
return res.status(400).json({
error: 'Bad Request',
message: 'file_id is required',
});
}
if (!userId) {
return res.status(401).json({
error: 'Unauthorized',
message: 'Authentication required',
});
}
// Get the file
const [file] = await getFiles({ file_id: fileId });
if (!file) {
return res.status(404).json({
error: 'Not Found',
message: 'File not found',
});
}
// Check if user owns the file
if (file.user && file.user.toString() === userId) {
req.fileAccess = { file };
return next();
}
// Check agent-based access (file inherits agent permissions)
const hasAgentAccess = await checkAgentBasedFileAccess({ userId, role: userRole, fileId });
if (hasAgentAccess) {
req.fileAccess = { file };
return next();
}
// No access
logger.warn(`[fileAccess] User ${userId} denied access to file ${fileId}`);
return res.status(403).json({
error: 'Forbidden',
message: 'Insufficient permissions to access this file',
});
} catch (error) {
logger.error('[fileAccess] Error checking file access:', error);
return res.status(500).json({
error: 'Internal Server Error',
message: 'Failed to check file access permissions',
});
}
};
module.exports = {
fileAccess,
};
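A minimal sketch of how this middleware could guard a file route; the route path and handler below are assumptions for illustration, but the parameter must be named file_id because the middleware reads req.params.file_id and attaches req.fileAccess.file on success:

const express = require('express');
const { fileAccess } = require('~/server/middleware/fileAccess'); // path assumed

const router = express.Router();

router.get('/files/:file_id', fileAccess, (req, res) => {
  // Ownership or agent-inherited VIEW access has already been verified at this point.
  const { file } = req.fileAccess;
  res.json(file);
});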

View File

@@ -1,13 +0,0 @@
const { canAccessResource } = require('./canAccessResource');
const { canAccessAgentResource } = require('./canAccessAgentResource');
const { canAccessAgentFromBody } = require('./canAccessAgentFromBody');
const { canAccessPromptViaGroup } = require('./canAccessPromptViaGroup');
const { canAccessPromptGroupResource } = require('./canAccessPromptGroupResource');
module.exports = {
canAccessResource,
canAccessAgentResource,
canAccessAgentFromBody,
canAccessPromptViaGroup,
canAccessPromptGroupResource,
};

View File

@@ -1,82 +0,0 @@
const { PrincipalType, PermissionTypes, Permissions } = require('librechat-data-provider');
const { getRoleByName } = require('~/models/Role');
const { logger } = require('~/config');
/**
* Middleware to check if user has permission to access people picker functionality
* Checks specific permission based on the 'type' query parameter:
* - type=user: requires VIEW_USERS permission
* - type=group: requires VIEW_GROUPS permission
* - type=role: requires VIEW_ROLES permission
* - no type (mixed search): requires either VIEW_USERS OR VIEW_GROUPS OR VIEW_ROLES
*/
const checkPeoplePickerAccess = async (req, res, next) => {
try {
const user = req.user;
if (!user || !user.role) {
return res.status(401).json({
error: 'Unauthorized',
message: 'Authentication required',
});
}
const role = await getRoleByName(user.role);
if (!role || !role.permissions) {
return res.status(403).json({
error: 'Forbidden',
message: 'No permissions configured for user role',
});
}
const { type } = req.query;
const peoplePickerPerms = role.permissions[PermissionTypes.PEOPLE_PICKER] || {};
const canViewUsers = peoplePickerPerms[Permissions.VIEW_USERS] === true;
const canViewGroups = peoplePickerPerms[Permissions.VIEW_GROUPS] === true;
const canViewRoles = peoplePickerPerms[Permissions.VIEW_ROLES] === true;
const permissionChecks = {
[PrincipalType.USER]: {
hasPermission: canViewUsers,
message: 'Insufficient permissions to search for users',
},
[PrincipalType.GROUP]: {
hasPermission: canViewGroups,
message: 'Insufficient permissions to search for groups',
},
[PrincipalType.ROLE]: {
hasPermission: canViewRoles,
message: 'Insufficient permissions to search for roles',
},
};
const check = permissionChecks[type];
if (check && !check.hasPermission) {
return res.status(403).json({
error: 'Forbidden',
message: check.message,
});
}
if (!type && !canViewUsers && !canViewGroups && !canViewRoles) {
return res.status(403).json({
error: 'Forbidden',
message: 'Insufficient permissions to search for users, groups, or roles',
});
}
next();
} catch (error) {
logger.error(
`[checkPeoplePickerAccess][${req.user?.id}] checkPeoplePickerAccess error for req.query.type = ${req.query.type}`,
error,
);
return res.status(500).json({
error: 'Internal Server Error',
message: 'Failed to check permissions',
});
}
};
module.exports = {
checkPeoplePickerAccess,
};
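
A minimal usage sketch (not part of the diff): the permissions router later in this comparison mounts this middleware ahead of the principal search controller, so the `type` query parameter decides which permission is required; the `query` parameter shown here is an assumed part of the search API.

router.get('/search-principals', checkPeoplePickerAccess, searchPrincipals);

// Assumed request shapes and the permission each one requires:
//   GET /api/permissions/search-principals?type=user&query=ada    -> VIEW_USERS
//   GET /api/permissions/search-principals?type=group&query=eng   -> VIEW_GROUPS
//   GET /api/permissions/search-principals?type=role&query=admin  -> VIEW_ROLES
//   GET /api/permissions/search-principals?query=ada              -> any one of the three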

View File

@@ -1,250 +0,0 @@
const { PrincipalType, PermissionTypes, Permissions } = require('librechat-data-provider');
const { checkPeoplePickerAccess } = require('./checkPeoplePickerAccess');
const { getRoleByName } = require('~/models/Role');
const { logger } = require('~/config');
jest.mock('~/models/Role');
jest.mock('~/config', () => ({
logger: {
error: jest.fn(),
},
}));
describe('checkPeoplePickerAccess', () => {
let req, res, next;
beforeEach(() => {
req = {
user: { id: 'user123', role: 'USER' },
query: {},
};
res = {
status: jest.fn().mockReturnThis(),
json: jest.fn(),
};
next = jest.fn();
jest.clearAllMocks();
});
it('should return 401 if user is not authenticated', async () => {
req.user = null;
await checkPeoplePickerAccess(req, res, next);
expect(res.status).toHaveBeenCalledWith(401);
expect(res.json).toHaveBeenCalledWith({
error: 'Unauthorized',
message: 'Authentication required',
});
expect(next).not.toHaveBeenCalled();
});
it('should return 403 if role has no permissions', async () => {
getRoleByName.mockResolvedValue(null);
await checkPeoplePickerAccess(req, res, next);
expect(res.status).toHaveBeenCalledWith(403);
expect(res.json).toHaveBeenCalledWith({
error: 'Forbidden',
message: 'No permissions configured for user role',
});
expect(next).not.toHaveBeenCalled();
});
it('should allow access when searching for users with VIEW_USERS permission', async () => {
req.query.type = PrincipalType.USER;
getRoleByName.mockResolvedValue({
permissions: {
[PermissionTypes.PEOPLE_PICKER]: {
[Permissions.VIEW_USERS]: true,
[Permissions.VIEW_GROUPS]: false,
[Permissions.VIEW_ROLES]: false,
},
},
});
await checkPeoplePickerAccess(req, res, next);
expect(next).toHaveBeenCalled();
expect(res.status).not.toHaveBeenCalled();
});
it('should deny access when searching for users without VIEW_USERS permission', async () => {
req.query.type = PrincipalType.USER;
getRoleByName.mockResolvedValue({
permissions: {
[PermissionTypes.PEOPLE_PICKER]: {
[Permissions.VIEW_USERS]: false,
[Permissions.VIEW_GROUPS]: true,
[Permissions.VIEW_ROLES]: true,
},
},
});
await checkPeoplePickerAccess(req, res, next);
expect(res.status).toHaveBeenCalledWith(403);
expect(res.json).toHaveBeenCalledWith({
error: 'Forbidden',
message: 'Insufficient permissions to search for users',
});
expect(next).not.toHaveBeenCalled();
});
it('should allow access when searching for groups with VIEW_GROUPS permission', async () => {
req.query.type = PrincipalType.GROUP;
getRoleByName.mockResolvedValue({
permissions: {
[PermissionTypes.PEOPLE_PICKER]: {
[Permissions.VIEW_USERS]: false,
[Permissions.VIEW_GROUPS]: true,
[Permissions.VIEW_ROLES]: false,
},
},
});
await checkPeoplePickerAccess(req, res, next);
expect(next).toHaveBeenCalled();
expect(res.status).not.toHaveBeenCalled();
});
it('should deny access when searching for groups without VIEW_GROUPS permission', async () => {
req.query.type = PrincipalType.GROUP;
getRoleByName.mockResolvedValue({
permissions: {
[PermissionTypes.PEOPLE_PICKER]: {
[Permissions.VIEW_USERS]: true,
[Permissions.VIEW_GROUPS]: false,
[Permissions.VIEW_ROLES]: true,
},
},
});
await checkPeoplePickerAccess(req, res, next);
expect(res.status).toHaveBeenCalledWith(403);
expect(res.json).toHaveBeenCalledWith({
error: 'Forbidden',
message: 'Insufficient permissions to search for groups',
});
expect(next).not.toHaveBeenCalled();
});
it('should allow access when searching for roles with VIEW_ROLES permission', async () => {
req.query.type = PrincipalType.ROLE;
getRoleByName.mockResolvedValue({
permissions: {
[PermissionTypes.PEOPLE_PICKER]: {
[Permissions.VIEW_USERS]: false,
[Permissions.VIEW_GROUPS]: false,
[Permissions.VIEW_ROLES]: true,
},
},
});
await checkPeoplePickerAccess(req, res, next);
expect(next).toHaveBeenCalled();
expect(res.status).not.toHaveBeenCalled();
});
it('should deny access when searching for roles without VIEW_ROLES permission', async () => {
req.query.type = PrincipalType.ROLE;
getRoleByName.mockResolvedValue({
permissions: {
[PermissionTypes.PEOPLE_PICKER]: {
[Permissions.VIEW_USERS]: true,
[Permissions.VIEW_GROUPS]: true,
[Permissions.VIEW_ROLES]: false,
},
},
});
await checkPeoplePickerAccess(req, res, next);
expect(res.status).toHaveBeenCalledWith(403);
expect(res.json).toHaveBeenCalledWith({
error: 'Forbidden',
message: 'Insufficient permissions to search for roles',
});
expect(next).not.toHaveBeenCalled();
});
it('should allow mixed search when user has at least one permission', async () => {
// No type specified = mixed search
req.query.type = undefined;
getRoleByName.mockResolvedValue({
permissions: {
[PermissionTypes.PEOPLE_PICKER]: {
[Permissions.VIEW_USERS]: false,
[Permissions.VIEW_GROUPS]: false,
[Permissions.VIEW_ROLES]: true,
},
},
});
await checkPeoplePickerAccess(req, res, next);
expect(next).toHaveBeenCalled();
expect(res.status).not.toHaveBeenCalled();
});
it('should deny mixed search when user has no permissions', async () => {
// No type specified = mixed search
req.query.type = undefined;
getRoleByName.mockResolvedValue({
permissions: {
[PermissionTypes.PEOPLE_PICKER]: {
[Permissions.VIEW_USERS]: false,
[Permissions.VIEW_GROUPS]: false,
[Permissions.VIEW_ROLES]: false,
},
},
});
await checkPeoplePickerAccess(req, res, next);
expect(res.status).toHaveBeenCalledWith(403);
expect(res.json).toHaveBeenCalledWith({
error: 'Forbidden',
message: 'Insufficient permissions to search for users, groups, or roles',
});
expect(next).not.toHaveBeenCalled();
});
it('should handle errors gracefully', async () => {
const error = new Error('Database error');
getRoleByName.mockRejectedValue(error);
await checkPeoplePickerAccess(req, res, next);
expect(logger.error).toHaveBeenCalledWith(
'[checkPeoplePickerAccess][user123] checkPeoplePickerAccess error for req.query.type = undefined',
error,
);
expect(res.status).toHaveBeenCalledWith(500);
expect(res.json).toHaveBeenCalledWith({
error: 'Internal Server Error',
message: 'Failed to check permissions',
});
expect(next).not.toHaveBeenCalled();
});
it('should handle missing permissions object gracefully', async () => {
req.query.type = PrincipalType.USER;
getRoleByName.mockResolvedValue({
permissions: {}, // No PEOPLE_PICKER permissions
});
await checkPeoplePickerAccess(req, res, next);
expect(res.status).toHaveBeenCalledWith(403);
expect(res.json).toHaveBeenCalledWith({
error: 'Forbidden',
message: 'Insufficient permissions to search for users',
});
expect(next).not.toHaveBeenCalled();
});
});

View File

@@ -8,7 +8,6 @@ const concurrentLimiter = require('./concurrentLimiter');
const validateEndpoint = require('./validateEndpoint');
const requireLocalAuth = require('./requireLocalAuth');
const canDeleteAccount = require('./canDeleteAccount');
const accessResources = require('./accessResources');
const setBalanceConfig = require('./setBalanceConfig');
const requireLdapAuth = require('./requireLdapAuth');
const abortMiddleware = require('./abortMiddleware');
@@ -30,7 +29,6 @@ module.exports = {
...validate,
...limiters,
...roles,
...accessResources,
noIndex,
checkBan,
uaParser,

View File

@@ -1,370 +0,0 @@
const mongoose = require('mongoose');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { checkAccess, generateCheckAccess } = require('@librechat/api');
const { PermissionTypes, Permissions } = require('librechat-data-provider');
const { getRoleByName } = require('~/models/Role');
const { Role } = require('~/db/models');
// Mock the logger from @librechat/data-schemas
jest.mock('@librechat/data-schemas', () => ({
...jest.requireActual('@librechat/data-schemas'),
logger: {
warn: jest.fn(),
error: jest.fn(),
info: jest.fn(),
debug: jest.fn(),
},
}));
// Mock the cache to use a simple in-memory implementation
const mockCache = new Map();
jest.mock('~/cache/getLogStores', () => {
return jest.fn(() => ({
get: jest.fn(async (key) => mockCache.get(key)),
set: jest.fn(async (key, value) => mockCache.set(key, value)),
clear: jest.fn(async () => mockCache.clear()),
}));
});
describe('Access Middleware', () => {
let mongoServer;
let req, res, next;
beforeAll(async () => {
mongoServer = await MongoMemoryServer.create();
const mongoUri = mongoServer.getUri();
await mongoose.connect(mongoUri);
});
afterAll(async () => {
await mongoose.disconnect();
await mongoServer.stop();
});
beforeEach(async () => {
await mongoose.connection.dropDatabase();
mockCache.clear(); // Clear the cache between tests
// Create test roles
await Role.create({
name: 'user',
permissions: {
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: true },
[PermissionTypes.PROMPTS]: {
[Permissions.SHARED_GLOBAL]: false,
[Permissions.USE]: true,
[Permissions.CREATE]: true,
},
[PermissionTypes.MEMORIES]: {
[Permissions.USE]: true,
[Permissions.CREATE]: true,
[Permissions.UPDATE]: true,
[Permissions.READ]: true,
[Permissions.OPT_OUT]: true,
},
[PermissionTypes.AGENTS]: {
[Permissions.USE]: true,
[Permissions.CREATE]: false,
[Permissions.SHARED_GLOBAL]: false,
},
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: true },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: true },
},
});
await Role.create({
name: 'admin',
permissions: {
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: true },
[PermissionTypes.PROMPTS]: {
[Permissions.SHARED_GLOBAL]: true,
[Permissions.USE]: true,
[Permissions.CREATE]: true,
},
[PermissionTypes.MEMORIES]: {
[Permissions.USE]: true,
[Permissions.CREATE]: true,
[Permissions.UPDATE]: true,
[Permissions.READ]: true,
[Permissions.OPT_OUT]: true,
},
[PermissionTypes.AGENTS]: {
[Permissions.USE]: true,
[Permissions.CREATE]: true,
[Permissions.SHARED_GLOBAL]: true,
},
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: true },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: true },
},
});
// Create limited role with no AGENTS permissions
await Role.create({
name: 'limited',
permissions: {
// Explicitly set AGENTS permissions to false
[PermissionTypes.AGENTS]: {
[Permissions.USE]: false,
[Permissions.CREATE]: false,
[Permissions.SHARED_GLOBAL]: false,
},
// Has permissions for other types
[PermissionTypes.PROMPTS]: {
[Permissions.USE]: true,
},
},
});
req = {
user: { id: 'user123', role: 'user' },
body: {},
originalUrl: '/test',
};
res = {
status: jest.fn().mockReturnThis(),
json: jest.fn(),
};
next = jest.fn();
jest.clearAllMocks();
});
describe('checkAccess', () => {
test('should return false if user is not provided', async () => {
const result = await checkAccess({
user: null,
permissionType: PermissionTypes.AGENTS,
permissions: [Permissions.USE],
getRoleByName,
});
expect(result).toBe(false);
});
test('should return true if user has required permission', async () => {
const result = await checkAccess({
req: {},
user: { id: 'user123', role: 'user' },
permissionType: PermissionTypes.AGENTS,
permissions: [Permissions.USE],
getRoleByName,
});
expect(result).toBe(true);
});
test('should return false if user lacks required permission', async () => {
const result = await checkAccess({
req: {},
user: { id: 'user123', role: 'user' },
permissionType: PermissionTypes.AGENTS,
permissions: [Permissions.CREATE],
getRoleByName,
});
expect(result).toBe(false);
});
test('should return false if user has only some of multiple permissions', async () => {
// User has USE but not CREATE, so should fail when checking for both
const result = await checkAccess({
req: {},
user: { id: 'user123', role: 'user' },
permissionType: PermissionTypes.AGENTS,
permissions: [Permissions.CREATE, Permissions.USE],
getRoleByName,
});
expect(result).toBe(false);
});
test('should return true if user has all of multiple permissions', async () => {
// Admin has both USE and CREATE
const result = await checkAccess({
req: {},
user: { id: 'admin123', role: 'admin' },
permissionType: PermissionTypes.AGENTS,
permissions: [Permissions.CREATE, Permissions.USE],
getRoleByName,
});
expect(result).toBe(true);
});
test('should check body properties when permission is not directly granted', async () => {
const req = { body: { id: 'agent123' } };
const result = await checkAccess({
req,
user: { id: 'user123', role: 'user' },
permissionType: PermissionTypes.AGENTS,
permissions: [Permissions.UPDATE],
bodyProps: {
[Permissions.UPDATE]: ['id'],
},
checkObject: req.body,
getRoleByName,
});
expect(result).toBe(true);
});
test('should return false if role is not found', async () => {
const result = await checkAccess({
req: {},
user: { id: 'user123', role: 'nonexistent' },
permissionType: PermissionTypes.AGENTS,
permissions: [Permissions.USE],
getRoleByName,
});
expect(result).toBe(false);
});
test('should return false if role has no permissions for the requested type', async () => {
const result = await checkAccess({
req: {},
user: { id: 'user123', role: 'limited' },
permissionType: PermissionTypes.AGENTS,
permissions: [Permissions.USE],
getRoleByName,
});
expect(result).toBe(false);
});
test('should handle admin role with all permissions', async () => {
const createResult = await checkAccess({
req: {},
user: { id: 'admin123', role: 'admin' },
permissionType: PermissionTypes.AGENTS,
permissions: [Permissions.CREATE],
getRoleByName,
});
expect(createResult).toBe(true);
const shareResult = await checkAccess({
req: {},
user: { id: 'admin123', role: 'admin' },
permissionType: PermissionTypes.AGENTS,
permissions: [Permissions.SHARED_GLOBAL],
getRoleByName,
});
expect(shareResult).toBe(true);
});
});
describe('generateCheckAccess', () => {
test('should call next() when user has required permission', async () => {
const middleware = generateCheckAccess({
permissionType: PermissionTypes.AGENTS,
permissions: [Permissions.USE],
getRoleByName,
});
await middleware(req, res, next);
expect(next).toHaveBeenCalled();
expect(res.status).not.toHaveBeenCalled();
});
test('should return 403 when user lacks permission', async () => {
const middleware = generateCheckAccess({
permissionType: PermissionTypes.AGENTS,
permissions: [Permissions.CREATE],
getRoleByName,
});
await middleware(req, res, next);
expect(next).not.toHaveBeenCalled();
expect(res.status).toHaveBeenCalledWith(403);
expect(res.json).toHaveBeenCalledWith({ message: 'Forbidden: Insufficient permissions' });
});
test('should check body properties when configured', async () => {
req.body = { agentId: 'agent123', description: 'test' };
const bodyProps = {
[Permissions.CREATE]: ['agentId'],
};
const middleware = generateCheckAccess({
permissionType: PermissionTypes.AGENTS,
permissions: [Permissions.CREATE],
bodyProps,
getRoleByName,
});
await middleware(req, res, next);
expect(next).toHaveBeenCalled();
expect(res.status).not.toHaveBeenCalled();
});
test('should handle database errors gracefully', async () => {
// Mock getRoleByName to throw an error
const mockGetRoleByName = jest
.fn()
.mockRejectedValue(new Error('Database connection failed'));
const middleware = generateCheckAccess({
permissionType: PermissionTypes.AGENTS,
permissions: [Permissions.USE],
getRoleByName: mockGetRoleByName,
});
await middleware(req, res, next);
expect(next).not.toHaveBeenCalled();
expect(res.status).toHaveBeenCalledWith(500);
expect(res.json).toHaveBeenCalledWith({
message: expect.stringContaining('Server error:'),
});
});
test('should work with multiple permission types', async () => {
req.user.role = 'admin';
const middleware = generateCheckAccess({
permissionType: PermissionTypes.AGENTS,
permissions: [Permissions.USE, Permissions.CREATE, Permissions.SHARED_GLOBAL],
getRoleByName,
});
await middleware(req, res, next);
expect(next).toHaveBeenCalled();
});
test('should handle missing user gracefully', async () => {
req.user = null;
const middleware = generateCheckAccess({
permissionType: PermissionTypes.AGENTS,
permissions: [Permissions.USE],
getRoleByName,
});
await middleware(req, res, next);
expect(next).not.toHaveBeenCalled();
expect(res.status).toHaveBeenCalledWith(403);
expect(res.json).toHaveBeenCalledWith({ message: 'Forbidden: Insufficient permissions' });
});
test('should handle role with no AGENTS permissions', async () => {
await Role.create({
name: 'noaccess',
permissions: {
// Explicitly set AGENTS with all permissions false
[PermissionTypes.AGENTS]: {
[Permissions.USE]: false,
[Permissions.CREATE]: false,
[Permissions.SHARED_GLOBAL]: false,
},
},
});
req.user.role = 'noaccess';
const middleware = generateCheckAccess({
permissionType: PermissionTypes.AGENTS,
permissions: [Permissions.USE],
getRoleByName,
});
await middleware(req, res, next);
expect(next).not.toHaveBeenCalled();
expect(res.status).toHaveBeenCalledWith(403);
expect(res.json).toHaveBeenCalledWith({ message: 'Forbidden: Insufficient permissions' });
});
});
});
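
The route files later in this comparison use generateCheckAccess exactly as these tests do: the factory is called once per permission set and the returned middleware is mounted on the route. A minimal wiring sketch (the router and handler names are assumptions):

const { generateCheckAccess } = require('@librechat/api');
const { PermissionTypes, Permissions } = require('librechat-data-provider');
const { getRoleByName } = require('~/models/Role');

const checkAgentCreate = generateCheckAccess({
  permissionType: PermissionTypes.AGENTS,
  permissions: [Permissions.USE, Permissions.CREATE],
  getRoleByName,
});

// Responds 403 "Forbidden: Insufficient permissions" when the role lacks USE or CREATE
router.post('/agents', checkAgentCreate, createAgentHandler);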

View File

@@ -33,7 +33,7 @@ const validateModel = async (req, res, next) => {
return next();
}
const { ILLEGAL_MODEL_REQ_SCORE: score = 1 } = process.env ?? {};
const { ILLEGAL_MODEL_REQ_SCORE: score = 5 } = process.env ?? {};
const type = ViolationTypes.ILLEGAL_MODEL_REQUEST;
const errorMessage = {

View File

@@ -1,85 +0,0 @@
const express = require('express');
const { ResourceType, PermissionBits } = require('librechat-data-provider');
const {
getUserEffectivePermissions,
updateResourcePermissions,
getResourcePermissions,
getResourceRoles,
searchPrincipals,
} = require('~/server/controllers/PermissionsController');
const { requireJwtAuth, checkBan, uaParser, canAccessResource } = require('~/server/middleware');
const { checkPeoplePickerAccess } = require('~/server/middleware/checkPeoplePickerAccess');
const router = express.Router();
// Apply common middleware
router.use(requireJwtAuth);
router.use(checkBan);
router.use(uaParser);
/**
* Generic routes for resource permissions
* Pattern: /api/permissions/{resourceType}/{resourceId}
*/
/**
* GET /api/permissions/search-principals
* Search for users and groups to grant permissions
*/
router.get('/search-principals', checkPeoplePickerAccess, searchPrincipals);
/**
* GET /api/permissions/{resourceType}/roles
* Get available roles for a resource type
*/
router.get('/:resourceType/roles', getResourceRoles);
/**
* GET /api/permissions/{resourceType}/{resourceId}
* Get all permissions for a specific resource
*/
router.get('/:resourceType/:resourceId', getResourcePermissions);
/**
* PUT /api/permissions/{resourceType}/{resourceId}
* Bulk update permissions for a specific resource
*/
router.put(
'/:resourceType/:resourceId',
// Use middleware that dynamically handles resource type and permissions
(req, res, next) => {
const { resourceType } = req.params;
let middleware;
if (resourceType === ResourceType.AGENT) {
middleware = canAccessResource({
resourceType: ResourceType.AGENT,
requiredPermission: PermissionBits.SHARE,
resourceIdParam: 'resourceId',
});
} else if (resourceType === ResourceType.PROMPTGROUP) {
middleware = canAccessResource({
resourceType: ResourceType.PROMPTGROUP,
requiredPermission: PermissionBits.SHARE,
resourceIdParam: 'resourceId',
});
} else {
return res.status(400).json({
error: 'Bad Request',
message: `Unsupported resource type: ${resourceType}`,
});
}
// Execute the middleware
middleware(req, res, next);
},
updateResourcePermissions,
);
/**
* GET /api/permissions/{resourceType}/{resourceId}/effective
* Get user's effective permissions for a specific resource
*/
router.get('/:resourceType/:resourceId/effective', getUserEffectivePermissions);
module.exports = router;
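
A small client-side sketch (not part of the diff) of the URL pattern documented above; only the paths come from the route definitions, while the mount point, token handling, and response shapes are assumptions:

// Hypothetical calls against the generic permissions routes (router assumed mounted at /api/permissions;
// 'agent' assumes ResourceType.AGENT's string value)
const headers = { Authorization: `Bearer ${token}` }; // requireJwtAuth is applied router-wide

// Available roles for a resource type
await fetch('/api/permissions/agent/roles', { headers });

// All permissions on one agent; updating them (PUT) additionally requires SHARE permission
await fetch(`/api/permissions/agent/${agentId}`, { headers });

// The requesting user's effective permissions on that agent
await fetch(`/api/permissions/agent/${agentId}/effective`, { headers });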

View File

@@ -3,19 +3,16 @@ const { nanoid } = require('nanoid');
const { logger } = require('@librechat/data-schemas');
const { generateCheckAccess } = require('@librechat/api');
const {
SystemRoles,
Permissions,
ResourceType,
PermissionTypes,
actionDelimiter,
PermissionBits,
removeNullishValues,
} = require('librechat-data-provider');
const { encryptMetadata, domainParser } = require('~/server/services/ActionService');
const { findAccessibleResources } = require('~/server/services/PermissionService');
const { getAgent, updateAgent, getListAgentsByAccess } = require('~/models/Agent');
const { updateAction, getActions, deleteAction } = require('~/models/Action');
const { isActionDomainAllowed } = require('~/server/services/domains');
const { canAccessAgentResource } = require('~/server/middleware');
const { getAgent, updateAgent } = require('~/models/Agent');
const { getRoleByName } = require('~/models/Role');
const router = express.Router();
@@ -26,6 +23,12 @@ const checkAgentCreate = generateCheckAccess({
getRoleByName,
});
// If the user has the ADMIN role,
// action editing is possible even if they are not the owner of the agent
const isAdmin = (req) => {
return req.user.role === SystemRoles.ADMIN;
};
/**
* Retrieves all user's actions
* @route GET /actions/
@@ -34,23 +37,10 @@ const checkAgentCreate = generateCheckAccess({
*/
router.get('/', async (req, res) => {
try {
const userId = req.user.id;
const editableAgentObjectIds = await findAccessibleResources({
userId,
role: req.user.role,
resourceType: ResourceType.AGENT,
requiredPermissions: PermissionBits.EDIT,
});
const agentsResponse = await getListAgentsByAccess({
accessibleIds: editableAgentObjectIds,
});
const editableAgentIds = agentsResponse.data.map((agent) => agent.id);
const actions =
editableAgentIds.length > 0 ? await getActions({ agent_id: { $in: editableAgentIds } }) : [];
res.json(actions);
const admin = isAdmin(req);
// If admin, get all actions, otherwise only user's actions
const searchParams = admin ? {} : { user: req.user.id };
res.json(await getActions(searchParams));
} catch (error) {
res.status(500).json({ error: error.message });
}
@@ -65,111 +55,106 @@ router.get('/', async (req, res) => {
* @param {ActionMetadata} req.body.metadata - Metadata for the action.
* @returns {Object} 200 - success response - application/json
*/
router.post(
'/:agent_id',
canAccessAgentResource({
requiredPermission: PermissionBits.EDIT,
resourceIdParam: 'agent_id',
}),
checkAgentCreate,
async (req, res) => {
try {
const { agent_id } = req.params;
router.post('/:agent_id', checkAgentCreate, async (req, res) => {
try {
const { agent_id } = req.params;
/** @type {{ functions: FunctionTool[], action_id: string, metadata: ActionMetadata }} */
const { functions, action_id: _action_id, metadata: _metadata } = req.body;
if (!functions.length) {
return res.status(400).json({ message: 'No functions provided' });
}
let metadata = await encryptMetadata(removeNullishValues(_metadata, true));
const isDomainAllowed = await isActionDomainAllowed(metadata.domain);
if (!isDomainAllowed) {
return res.status(400).json({ message: 'Domain not allowed' });
}
let { domain } = metadata;
domain = await domainParser(domain, true);
if (!domain) {
return res.status(400).json({ message: 'No domain provided' });
}
const action_id = _action_id ?? nanoid();
const initialPromises = [];
// Permissions already validated by middleware - load agent directly
initialPromises.push(getAgent({ id: agent_id }));
if (_action_id) {
initialPromises.push(getActions({ action_id }, true));
}
/** @type {[Agent, [Action|undefined]]} */
const [agent, actions_result] = await Promise.all(initialPromises);
if (!agent) {
return res.status(404).json({ message: 'Agent not found for adding action' });
}
if (actions_result && actions_result.length) {
const action = actions_result[0];
metadata = { ...action.metadata, ...metadata };
}
const { actions: _actions = [], author: agent_author } = agent ?? {};
const actions = [];
for (const action of _actions) {
const [_action_domain, current_action_id] = action.split(actionDelimiter);
if (current_action_id === action_id) {
continue;
}
actions.push(action);
}
actions.push(`${domain}${actionDelimiter}${action_id}`);
/** @type {string[]} */
const { tools: _tools = [] } = agent;
const tools = _tools
.filter((tool) => !(tool && (tool.includes(domain) || tool.includes(action_id))))
.concat(functions.map((tool) => `${tool.function.name}${actionDelimiter}${domain}`));
// Force version update since actions are changing
const updatedAgent = await updateAgent(
{ id: agent_id },
{ tools, actions },
{
updatingUserId: req.user.id,
forceVersion: true,
},
);
// Only update user field for new actions
const actionUpdateData = { metadata, agent_id };
if (!actions_result || !actions_result.length) {
// For new actions, use the agent owner's user ID
actionUpdateData.user = agent_author || req.user.id;
}
/** @type {[Action]} */
const updatedAction = await updateAction({ action_id }, actionUpdateData);
const sensitiveFields = ['api_key', 'oauth_client_id', 'oauth_client_secret'];
for (let field of sensitiveFields) {
if (updatedAction.metadata[field]) {
delete updatedAction.metadata[field];
}
}
res.json([updatedAgent, updatedAction]);
} catch (error) {
const message = 'Trouble updating the Agent Action';
logger.error(message, error);
res.status(500).json({ message });
/** @type {{ functions: FunctionTool[], action_id: string, metadata: ActionMetadata }} */
const { functions, action_id: _action_id, metadata: _metadata } = req.body;
if (!functions.length) {
return res.status(400).json({ message: 'No functions provided' });
}
},
);
let metadata = await encryptMetadata(removeNullishValues(_metadata, true));
const isDomainAllowed = await isActionDomainAllowed(metadata.domain);
if (!isDomainAllowed) {
return res.status(400).json({ message: 'Domain not allowed' });
}
let { domain } = metadata;
domain = await domainParser(domain, true);
if (!domain) {
return res.status(400).json({ message: 'No domain provided' });
}
const action_id = _action_id ?? nanoid();
const initialPromises = [];
const admin = isAdmin(req);
// If admin, can edit any agent, otherwise only user's agents
const agentQuery = admin ? { id: agent_id } : { id: agent_id, author: req.user.id };
// TODO: share agents
initialPromises.push(getAgent(agentQuery));
if (_action_id) {
initialPromises.push(getActions({ action_id }, true));
}
/** @type {[Agent, [Action|undefined]]} */
const [agent, actions_result] = await Promise.all(initialPromises);
if (!agent) {
return res.status(404).json({ message: 'Agent not found for adding action' });
}
if (actions_result && actions_result.length) {
const action = actions_result[0];
metadata = { ...action.metadata, ...metadata };
}
const { actions: _actions = [], author: agent_author } = agent ?? {};
const actions = [];
for (const action of _actions) {
const [_action_domain, current_action_id] = action.split(actionDelimiter);
if (current_action_id === action_id) {
continue;
}
actions.push(action);
}
actions.push(`${domain}${actionDelimiter}${action_id}`);
/** @type {string[]} */
const { tools: _tools = [] } = agent;
const tools = _tools
.filter((tool) => !(tool && (tool.includes(domain) || tool.includes(action_id))))
.concat(functions.map((tool) => `${tool.function.name}${actionDelimiter}${domain}`));
// Force version update since actions are changing
const updatedAgent = await updateAgent(
agentQuery,
{ tools, actions },
{
updatingUserId: req.user.id,
forceVersion: true,
},
);
// Only update user field for new actions
const actionUpdateData = { metadata, agent_id };
if (!actions_result || !actions_result.length) {
// For new actions, use the agent owner's user ID
actionUpdateData.user = agent_author || req.user.id;
}
/** @type {[Action]} */
const updatedAction = await updateAction({ action_id }, actionUpdateData);
const sensitiveFields = ['api_key', 'oauth_client_id', 'oauth_client_secret'];
for (let field of sensitiveFields) {
if (updatedAction.metadata[field]) {
delete updatedAction.metadata[field];
}
}
res.json([updatedAgent, updatedAction]);
} catch (error) {
const message = 'Trouble updating the Agent Action';
logger.error(message, error);
res.status(500).json({ message });
}
});
/**
* Deletes an action for a specific agent.
@@ -178,56 +163,52 @@ router.post(
* @param {string} req.params.action_id - The ID of the action to delete.
* @returns {Object} 200 - success response - application/json
*/
router.delete(
'/:agent_id/:action_id',
canAccessAgentResource({
requiredPermission: PermissionBits.EDIT,
resourceIdParam: 'agent_id',
}),
checkAgentCreate,
async (req, res) => {
try {
const { agent_id, action_id } = req.params;
router.delete('/:agent_id/:action_id', checkAgentCreate, async (req, res) => {
try {
const { agent_id, action_id } = req.params;
const admin = isAdmin(req);
// Permissions already validated by middleware - load agent directly
const agent = await getAgent({ id: agent_id });
if (!agent) {
return res.status(404).json({ message: 'Agent not found for deleting action' });
}
const { tools = [], actions = [] } = agent;
let domain = '';
const updatedActions = actions.filter((action) => {
if (action.includes(action_id)) {
[domain] = action.split(actionDelimiter);
return false;
}
return true;
});
domain = await domainParser(domain, true);
if (!domain) {
return res.status(400).json({ message: 'No domain provided' });
}
const updatedTools = tools.filter((tool) => !(tool && tool.includes(domain)));
// Force version update since actions are being removed
await updateAgent(
{ id: agent_id },
{ tools: updatedTools, actions: updatedActions },
{ updatingUserId: req.user.id, forceVersion: true },
);
await deleteAction({ action_id });
res.status(200).json({ message: 'Action deleted successfully' });
} catch (error) {
const message = 'Trouble deleting the Agent Action';
logger.error(message, error);
res.status(500).json({ message });
// If admin, can delete any agent, otherwise only user's agents
const agentQuery = admin ? { id: agent_id } : { id: agent_id, author: req.user.id };
const agent = await getAgent(agentQuery);
if (!agent) {
return res.status(404).json({ message: 'Agent not found for deleting action' });
}
},
);
const { tools = [], actions = [] } = agent;
let domain = '';
const updatedActions = actions.filter((action) => {
if (action.includes(action_id)) {
[domain] = action.split(actionDelimiter);
return false;
}
return true;
});
domain = await domainParser(domain, true);
if (!domain) {
return res.status(400).json({ message: 'No domain provided' });
}
const updatedTools = tools.filter((tool) => !(tool && tool.includes(domain)));
// Force version update since actions are being removed
await updateAgent(
agentQuery,
{ tools: updatedTools, actions: updatedActions },
{ updatingUserId: req.user.id, forceVersion: true },
);
// If admin, can delete any action, otherwise only user's actions
const actionQuery = admin ? { action_id } : { action_id, user: req.user.id };
await deleteAction(actionQuery);
res.status(200).json({ message: 'Action deleted successfully' });
} catch (error) {
const message = 'Trouble deleting the Agent Action';
logger.error(message, error);
res.status(500).json({ message });
}
});
module.exports = router;

View File

@@ -1,13 +1,12 @@
const express = require('express');
const { generateCheckAccess, skipAgentCheck } = require('@librechat/api');
const { PermissionTypes, Permissions, PermissionBits } = require('librechat-data-provider');
const { PermissionTypes, Permissions } = require('librechat-data-provider');
const {
setHeaders,
moderateText,
// validateModel,
validateConvoAccess,
buildEndpointOption,
canAccessAgentFromBody,
} = require('~/server/middleware');
const { initializeClient } = require('~/server/services/Endpoints/agents');
const AgentController = require('~/server/controllers/agents/request');
@@ -24,12 +23,8 @@ const checkAgentAccess = generateCheckAccess({
skipCheck: skipAgentCheck,
getRoleByName,
});
const checkAgentResourceAccess = canAccessAgentFromBody({
requiredPermission: PermissionBits.VIEW,
});
router.use(checkAgentAccess);
router.use(checkAgentResourceAccess);
router.use(validateConvoAccess);
router.use(buildEndpointOption);
router.use(setHeaders);

View File

@@ -37,6 +37,4 @@ if (isEnabled(LIMIT_MESSAGE_USER)) {
chatRouter.use('/', chat);
router.use('/chat', chatRouter);
// Add marketplace routes
module.exports = router;

View File

@@ -1,7 +1,7 @@
const express = require('express');
const { generateCheckAccess } = require('@librechat/api');
const { PermissionTypes, Permissions, PermissionBits } = require('librechat-data-provider');
const { requireJwtAuth, canAccessAgentResource } = require('~/server/middleware');
const { PermissionTypes, Permissions } = require('librechat-data-provider');
const { requireJwtAuth } = require('~/server/middleware');
const v1 = require('~/server/controllers/agents/v1');
const { getRoleByName } = require('~/models/Role');
const actions = require('./actions');
@@ -44,11 +44,6 @@ router.use('/actions', actions);
*/
router.use('/tools', tools);
/**
* Get all agent categories with counts
* @route GET /agents/marketplace/categories
*/
router.get('/categories', v1.getAgentCategories);
/**
* Creates an agent.
* @route POST /agents
@@ -58,38 +53,13 @@ router.get('/categories', v1.getAgentCategories);
router.post('/', checkAgentCreate, v1.createAgent);
/**
* Retrieves basic agent information (VIEW permission required).
* Returns safe, non-sensitive agent data for viewing purposes.
* Retrieves an agent.
* @route GET /agents/:id
* @param {string} req.params.id - Agent identifier.
* @returns {Agent} 200 - Basic agent info - application/json
* @returns {Agent} 200 - Success response - application/json
*/
router.get(
'/:id',
checkAgentAccess,
canAccessAgentResource({
requiredPermission: PermissionBits.VIEW,
resourceIdParam: 'id',
}),
v1.getAgent,
);
router.get('/:id', checkAgentAccess, v1.getAgent);
/**
* Retrieves full agent details including sensitive configuration (EDIT permission required).
* Returns complete agent data for editing/configuration purposes.
* @route GET /agents/:id/expanded
* @param {string} req.params.id - Agent identifier.
* @returns {Agent} 200 - Full agent details - application/json
*/
router.get(
'/:id/expanded',
checkAgentAccess,
canAccessAgentResource({
requiredPermission: PermissionBits.EDIT,
resourceIdParam: 'id',
}),
(req, res) => v1.getAgent(req, res, true), // Expanded version
);
/**
* Updates an agent.
* @route PATCH /agents/:id
@@ -97,15 +67,7 @@ router.get(
* @param {AgentUpdateParams} req.body - The agent update parameters.
* @returns {Agent} 200 - Success response - application/json
*/
router.patch(
'/:id',
checkGlobalAgentShare,
canAccessAgentResource({
requiredPermission: PermissionBits.EDIT,
resourceIdParam: 'id',
}),
v1.updateAgent,
);
router.patch('/:id', checkGlobalAgentShare, v1.updateAgent);
/**
* Duplicates an agent.
@@ -113,15 +75,7 @@ router.patch(
* @param {string} req.params.id - Agent identifier.
* @returns {Agent} 201 - Success response - application/json
*/
router.post(
'/:id/duplicate',
checkAgentCreate,
canAccessAgentResource({
requiredPermission: PermissionBits.VIEW,
resourceIdParam: 'id',
}),
v1.duplicateAgent,
);
router.post('/:id/duplicate', checkAgentCreate, v1.duplicateAgent);
/**
* Deletes an agent.
@@ -129,15 +83,7 @@ router.post(
* @param {string} req.params.id - Agent identifier.
* @returns {Agent} 200 - success response - application/json
*/
router.delete(
'/:id',
checkAgentCreate,
canAccessAgentResource({
requiredPermission: PermissionBits.DELETE,
resourceIdParam: 'id',
}),
v1.deleteAgent,
);
router.delete('/:id', checkAgentCreate, v1.deleteAgent);
/**
* Reverts an agent to a previous version.
@@ -164,14 +110,6 @@ router.get('/', checkAgentAccess, v1.getListAgents);
* @param {string} [req.body.metadata] - Optional metadata for the agent's avatar.
* @returns {Object} 200 - success response - application/json
*/
avatar.post(
'/:agent_id/avatar/',
checkAgentAccess,
canAccessAgentResource({
requiredPermission: PermissionBits.EDIT,
resourceIdParam: 'agent_id',
}),
v1.uploadAgentAvatar,
);
avatar.post('/:agent_id/avatar/', checkAgentAccess, v1.uploadAgentAvatar);
module.exports = { v1: router, avatar };

View File

@@ -4,7 +4,6 @@ const {
registrationController,
resetPasswordController,
resetPasswordRequestController,
graphTokenController,
} = require('~/server/controllers/AuthController');
const { loginController } = require('~/server/controllers/auth/LoginController');
const { logoutController } = require('~/server/controllers/auth/LogoutController');
@@ -70,6 +69,4 @@ router.post('/2fa/confirm', requireJwtAuth, confirm2FA);
router.post('/2fa/disable', requireJwtAuth, disable2FA);
router.post('/2fa/backup/regenerate', requireJwtAuth, regenerateBackupCodes);
router.get('/graph-token', requireJwtAuth, graphTokenController);
module.exports = router;

View File

@@ -21,9 +21,6 @@ const publicSharedLinksEnabled =
(process.env.ALLOW_SHARED_LINKS_PUBLIC === undefined ||
isEnabled(process.env.ALLOW_SHARED_LINKS_PUBLIC));
const sharePointFilePickerEnabled = isEnabled(process.env.ENABLE_SHAREPOINT_FILEPICKER);
const openidReuseTokens = isEnabled(process.env.OPENID_REUSE_TOKENS);
router.get('/', async function (req, res) {
const cache = getLogStores(CacheKeys.CONFIG_STORE);
@@ -101,11 +98,6 @@ router.get('/', async function (req, res) {
instanceProjectId: instanceProject._id.toString(),
bundlerURL: process.env.SANDPACK_BUNDLER_URL,
staticBundlerURL: process.env.SANDPACK_STATIC_BUNDLER_URL,
sharePointFilePickerEnabled,
sharePointBaseUrl: process.env.SHAREPOINT_BASE_URL,
sharePointPickerGraphScope: process.env.SHAREPOINT_PICKER_GRAPH_SCOPE,
sharePointPickerSharePointScope: process.env.SHAREPOINT_PICKER_SHAREPOINT_SCOPE,
openidReuseTokens,
};
payload.mcpServers = {};

View File

@@ -3,7 +3,6 @@ const express = require('express');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { resizeAvatar } = require('~/server/services/Files/images/avatar');
const { filterFile } = require('~/server/services/Files/process');
const { getFileStrategy } = require('~/server/utils/getFileStrategy');
const { logger } = require('~/config');
const router = express.Router();
@@ -19,7 +18,7 @@ router.post('/', async (req, res) => {
throw new Error('User ID is undefined');
}
const fileStrategy = getFileStrategy(req.app.locals, { isAvatar: true });
const fileStrategy = req.app.locals.fileStrategy;
const desiredFormat = req.app.locals.imageOutputType;
const resizedBuffer = await resizeAvatar({
userId,

View File

@@ -2,13 +2,10 @@ const express = require('express');
const request = require('supertest');
const mongoose = require('mongoose');
const { v4: uuidv4 } = require('uuid');
const { createMethods } = require('@librechat/data-schemas');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { AccessRoleIds, ResourceType, PrincipalType } = require('librechat-data-provider');
const { createAgent } = require('~/models/Agent');
const { createFile } = require('~/models/File');
const { GLOBAL_PROJECT_NAME } = require('librechat-data-provider').Constants;
// Only mock the external dependencies that we don't want to test
// Mock dependencies
jest.mock('~/server/services/Files/process', () => ({
processDeleteRequest: jest.fn().mockResolvedValue({}),
filterFile: jest.fn(),
@@ -28,8 +25,31 @@ jest.mock('~/server/services/Tools/credentials', () => ({
loadAuthValues: jest.fn(),
}));
// Import the router
const router = require('~/server/routes/files/files');
jest.mock('~/server/services/Files/S3/crud', () => ({
refreshS3FileUrls: jest.fn(),
}));
jest.mock('~/cache', () => ({
getLogStores: jest.fn(() => ({
get: jest.fn(),
set: jest.fn(),
})),
}));
jest.mock('~/config', () => ({
logger: {
error: jest.fn(),
warn: jest.fn(),
debug: jest.fn(),
},
}));
const { createFile } = require('~/models/File');
const { createAgent } = require('~/models/Agent');
const { getProjectByName } = require('~/models/Project');
// Import the router after mocks
const router = require('./files');
describe('File Routes - Agent Files Endpoint', () => {
let app;
@@ -40,42 +60,13 @@ describe('File Routes - Agent Files Endpoint', () => {
let fileId1;
let fileId2;
let fileId3;
let File;
let User;
let Agent;
let methods;
let AclEntry;
// eslint-disable-next-line no-unused-vars
let AccessRole;
let modelsToCleanup = [];
beforeAll(async () => {
mongoServer = await MongoMemoryServer.create();
const mongoUri = mongoServer.getUri();
await mongoose.connect(mongoUri);
await mongoose.connect(mongoServer.getUri());
// Initialize all models using createModels
const { createModels } = require('@librechat/data-schemas');
const models = createModels(mongoose);
// Track which models we're adding
modelsToCleanup = Object.keys(models);
// Register models on mongoose.models so methods can access them
Object.assign(mongoose.models, models);
// Create methods with our test mongoose instance
methods = createMethods(mongoose);
// Now we can access models from the db/models
File = models.File;
Agent = models.Agent;
AclEntry = models.AclEntry;
User = models.User;
AccessRole = models.AccessRole;
// Seed default roles using our methods
await methods.seedDefaultRoles();
// Initialize models
require('~/db/models');
app = express();
app.use(express.json());
@@ -91,121 +82,88 @@ describe('File Routes - Agent Files Endpoint', () => {
});
afterAll(async () => {
// Clean up all collections before disconnecting
const collections = mongoose.connection.collections;
for (const key in collections) {
await collections[key].deleteMany({});
}
// Clear only the models we added
for (const modelName of modelsToCleanup) {
if (mongoose.models[modelName]) {
delete mongoose.models[modelName];
}
}
await mongoose.disconnect();
await mongoServer.stop();
});
beforeEach(async () => {
// Clean up all test data
await File.deleteMany({});
await Agent.deleteMany({});
await User.deleteMany({});
await AclEntry.deleteMany({});
// Don't delete AccessRole as they are seeded defaults needed for tests
jest.clearAllMocks();
// Create test users
authorId = new mongoose.Types.ObjectId();
otherUserId = new mongoose.Types.ObjectId();
// Clear database
const collections = mongoose.connection.collections;
for (const key in collections) {
await collections[key].deleteMany({});
}
authorId = new mongoose.Types.ObjectId().toString();
otherUserId = new mongoose.Types.ObjectId().toString();
agentId = uuidv4();
fileId1 = uuidv4();
fileId2 = uuidv4();
fileId3 = uuidv4();
// Create users in database
await User.create({
_id: authorId,
username: 'author',
email: 'author@test.com',
});
await User.create({
_id: otherUserId,
username: 'other',
email: 'other@test.com',
});
// Create files
await createFile({
user: authorId,
file_id: fileId1,
filename: 'file1.txt',
filepath: '/uploads/file1.txt',
bytes: 100,
filename: 'agent-file1.txt',
filepath: `/uploads/${authorId}/${fileId1}`,
bytes: 1024,
type: 'text/plain',
});
await createFile({
user: authorId,
file_id: fileId2,
filename: 'file2.txt',
filepath: '/uploads/file2.txt',
bytes: 200,
filename: 'agent-file2.txt',
filepath: `/uploads/${authorId}/${fileId2}`,
bytes: 2048,
type: 'text/plain',
});
await createFile({
user: otherUserId,
file_id: fileId3,
filename: 'file3.txt',
filepath: '/uploads/file3.txt',
bytes: 300,
filename: 'user-file.txt',
filepath: `/uploads/${otherUserId}/${fileId3}`,
bytes: 512,
type: 'text/plain',
});
// Create an agent with files attached
await createAgent({
id: agentId,
name: 'Test Agent',
author: authorId,
model: 'gpt-4',
provider: 'openai',
isCollaborative: true,
tool_resources: {
file_search: {
file_ids: [fileId1, fileId2],
},
},
});
// Share the agent globally
const globalProject = await getProjectByName(GLOBAL_PROJECT_NAME, '_id');
if (globalProject) {
const { updateAgent } = require('~/models/Agent');
await updateAgent({ id: agentId }, { projectIds: [globalProject._id] });
}
});
describe('GET /files/agent/:agent_id', () => {
it('should return files accessible through the agent for non-author with EDIT permission', async () => {
// Create an agent with files attached
const agent = await createAgent({
id: agentId,
name: 'Test Agent',
provider: 'openai',
model: 'gpt-4',
author: authorId,
tool_resources: {
file_search: {
file_ids: [fileId1, fileId2],
},
},
});
// Grant EDIT permission to user on the agent using PermissionService
const { grantPermission } = require('~/server/services/PermissionService');
await grantPermission({
principalType: PrincipalType.USER,
principalId: otherUserId,
resourceType: ResourceType.AGENT,
resourceId: agent._id,
accessRoleId: AccessRoleIds.AGENT_EDITOR,
grantedBy: authorId,
});
// Mock req.user for this request
app.use((req, res, next) => {
req.user = { id: otherUserId.toString() };
next();
});
it('should return files accessible through the agent for non-author', async () => {
const response = await request(app).get(`/files/agent/${agentId}`);
expect(response.status).toBe(200);
expect(Array.isArray(response.body)).toBe(true);
expect(response.body).toHaveLength(2);
expect(response.body.map((f) => f.file_id)).toContain(fileId1);
expect(response.body.map((f) => f.file_id)).toContain(fileId2);
expect(response.body).toHaveLength(2); // Only agent files, not user-owned files
const fileIds = response.body.map((f) => f.file_id);
expect(fileIds).toContain(fileId1);
expect(fileIds).toContain(fileId2);
expect(fileIds).not.toContain(fileId3); // User's own file not included
});
it('should return 400 when agent_id is not provided', async () => {
@@ -218,63 +176,45 @@ describe('File Routes - Agent Files Endpoint', () => {
const response = await request(app).get('/files/agent/non-existent-agent');
expect(response.status).toBe(200);
expect(Array.isArray(response.body)).toBe(true);
expect(response.body).toEqual([]);
expect(response.body).toEqual([]); // Empty array for non-existent agent
});
it('should return empty array when user only has VIEW permission', async () => {
// Create an agent with files attached
const agent = await createAgent({
id: agentId,
name: 'Test Agent',
provider: 'openai',
model: 'gpt-4',
it('should return empty array when agent is not collaborative', async () => {
// Create a non-collaborative agent
const nonCollabAgentId = uuidv4();
await createAgent({
id: nonCollabAgentId,
name: 'Non-Collaborative Agent',
author: authorId,
model: 'gpt-4',
provider: 'openai',
isCollaborative: false,
tool_resources: {
file_search: {
file_ids: [fileId1, fileId2],
file_ids: [fileId1],
},
},
});
// Grant only VIEW permission to user on the agent
const { grantPermission } = require('~/server/services/PermissionService');
await grantPermission({
principalType: PrincipalType.USER,
principalId: otherUserId,
resourceType: ResourceType.AGENT,
resourceId: agent._id,
accessRoleId: AccessRoleIds.AGENT_VIEWER,
grantedBy: authorId,
});
// Share it globally
const globalProject = await getProjectByName(GLOBAL_PROJECT_NAME, '_id');
if (globalProject) {
const { updateAgent } = require('~/models/Agent');
await updateAgent({ id: nonCollabAgentId }, { projectIds: [globalProject._id] });
}
const response = await request(app).get(`/files/agent/${agentId}`);
const response = await request(app).get(`/files/agent/${nonCollabAgentId}`);
expect(response.status).toBe(200);
expect(Array.isArray(response.body)).toBe(true);
expect(response.body).toEqual([]);
expect(response.body).toEqual([]); // Empty array when not collaborative
});
it('should return agent files for agent author', async () => {
// Create an agent with files attached
await createAgent({
id: agentId,
name: 'Test Agent',
provider: 'openai',
model: 'gpt-4',
author: authorId,
tool_resources: {
file_search: {
file_ids: [fileId1, fileId2],
},
},
});
// Create a new app instance with author authentication
const authorApp = express();
authorApp.use(express.json());
authorApp.use((req, res, next) => {
req.user = { id: authorId.toString() };
req.user = { id: authorId };
req.app = { locals: {} };
next();
});
@@ -283,48 +223,46 @@ describe('File Routes - Agent Files Endpoint', () => {
const response = await request(authorApp).get(`/files/agent/${agentId}`);
expect(response.status).toBe(200);
expect(Array.isArray(response.body)).toBe(true);
expect(response.body).toHaveLength(2);
expect(response.body).toHaveLength(2); // Agent files for author
const fileIds = response.body.map((f) => f.file_id);
expect(fileIds).toContain(fileId1);
expect(fileIds).toContain(fileId2);
expect(fileIds).not.toContain(fileId3); // User's own file not included
});
it('should return files uploaded by other users to shared agent for author', async () => {
const anotherUserId = new mongoose.Types.ObjectId();
// Create a file uploaded by another user
const otherUserFileId = uuidv4();
await User.create({
_id: anotherUserId,
username: 'another',
email: 'another@test.com',
});
const anotherUserId = new mongoose.Types.ObjectId().toString();
await createFile({
user: anotherUserId,
file_id: otherUserFileId,
filename: 'other-user-file.txt',
filepath: '/uploads/other-user-file.txt',
bytes: 400,
filepath: `/uploads/${anotherUserId}/${otherUserFileId}`,
bytes: 4096,
type: 'text/plain',
});
// Create agent to include the file uploaded by another user
await createAgent({
id: agentId,
name: 'Test Agent',
provider: 'openai',
model: 'gpt-4',
author: authorId,
tool_resources: {
file_search: {
file_ids: [fileId1, otherUserFileId],
// Update agent to include the file uploaded by another user
const { updateAgent } = require('~/models/Agent');
await updateAgent(
{ id: agentId },
{
tool_resources: {
file_search: {
file_ids: [fileId1, fileId2, otherUserFileId],
},
},
},
});
);
// Create a new app instance with author authentication
// Create app instance with author authentication
const authorApp = express();
authorApp.use(express.json());
authorApp.use((req, res, next) => {
req.user = { id: authorId.toString() };
req.user = { id: authorId };
req.app = { locals: {} };
next();
});
@@ -333,10 +271,12 @@ describe('File Routes - Agent Files Endpoint', () => {
const response = await request(authorApp).get(`/files/agent/${agentId}`);
expect(response.status).toBe(200);
expect(Array.isArray(response.body)).toBe(true);
expect(response.body).toHaveLength(2);
expect(response.body.map((f) => f.file_id)).toContain(fileId1);
expect(response.body.map((f) => f.file_id)).toContain(otherUserFileId);
expect(response.body).toHaveLength(3); // Including file from another user
const fileIds = response.body.map((f) => f.file_id);
expect(fileIds).toContain(fileId1);
expect(fileIds).toContain(fileId2);
expect(fileIds).toContain(otherUserFileId); // File uploaded by another user
});
});
});

View File

@@ -5,10 +5,9 @@ const {
Time,
isUUID,
CacheKeys,
Constants,
FileSources,
ResourceType,
EModelEndpoint,
PermissionBits,
isAgentsEndpoint,
checkOpenAIStorage,
} = require('librechat-data-provider');
@@ -18,15 +17,12 @@ const {
processDeleteRequest,
processAgentFileUpload,
} = require('~/server/services/Files/process');
const { fileAccess } = require('~/server/middleware/accessResources/fileAccess');
const { getFiles, batchUpdateFiles, hasAccessToFilesViaAgent } = require('~/models/File');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { getOpenAIClient } = require('~/server/controllers/assistants/helpers');
const { checkPermission } = require('~/server/services/PermissionService');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { refreshS3FileUrls } = require('~/server/services/Files/S3/crud');
const { hasAccessToFilesViaAgent } = require('~/server/services/Files');
const { getFiles, batchUpdateFiles } = require('~/models/File');
const { cleanFileName } = require('~/server/utils/files');
const { getProjectByName } = require('~/models/Project');
const { getAssistant } = require('~/models/Assistant');
const { getAgent } = require('~/models/Agent');
const { getLogStores } = require('~/cache');
@@ -71,25 +67,29 @@ router.get('/agent/:agent_id', async (req, res) => {
return res.status(400).json({ error: 'Agent ID is required' });
}
// Get the agent to check ownership and attached files
const agent = await getAgent({ id: agent_id });
if (!agent) {
// No agent found, return empty array
return res.status(200).json([]);
}
// Check if user has access to the agent
if (agent.author.toString() !== userId) {
const hasEditPermission = await checkPermission({
userId,
role: req.user.role,
resourceType: ResourceType.AGENT,
resourceId: agent._id,
requiredPermission: PermissionBits.EDIT,
});
// Non-authors need the agent to be globally shared and collaborative
const globalProject = await getProjectByName(Constants.GLOBAL_PROJECT_NAME, '_id');
if (!hasEditPermission) {
if (
!globalProject ||
!agent.projectIds.some((pid) => pid.toString() === globalProject._id.toString()) ||
!agent.isCollaborative
) {
return res.status(200).json([]);
}
}
// Collect all file IDs from agent's tool resources
const agentFileIds = [];
if (agent.tool_resources) {
for (const [, resource] of Object.entries(agent.tool_resources)) {
@@ -99,10 +99,12 @@ router.get('/agent/:agent_id', async (req, res) => {
}
}
// If no files attached to agent, return empty array
if (agentFileIds.length === 0) {
return res.status(200).json([]);
}
// Get only the files attached to this agent
const files = await getFiles({ file_id: { $in: agentFileIds } }, null, { text: 0 });
res.status(200).json(files);
@@ -151,15 +153,18 @@ router.delete('/', async (req, res) => {
const ownedFiles = [];
const nonOwnedFiles = [];
const fileMap = new Map();
for (const file of dbFiles) {
if (file.user.toString() === req.user.id.toString()) {
fileMap.set(file.file_id, file);
if (file.user.toString() === req.user.id) {
ownedFiles.push(file);
} else {
nonOwnedFiles.push(file);
}
}
// If all files are owned by the user, no need for further checks
if (nonOwnedFiles.length === 0) {
await processDeleteRequest({ req, files: ownedFiles });
logger.debug(
@@ -172,18 +177,20 @@ router.delete('/', async (req, res) => {
return;
}
// Check access for non-owned files
let authorizedFiles = [...ownedFiles];
let unauthorizedFiles = [];
if (req.body.agent_id && nonOwnedFiles.length > 0) {
// Batch check access for all non-owned files
const nonOwnedFileIds = nonOwnedFiles.map((f) => f.file_id);
const accessMap = await hasAccessToFilesViaAgent({
userId: req.user.id,
role: req.user.role,
fileIds: nonOwnedFileIds,
agentId: req.body.agent_id,
});
const accessMap = await hasAccessToFilesViaAgent(
req.user.id,
nonOwnedFileIds,
req.body.agent_id,
);
// Separate authorized and unauthorized files
for (const file of nonOwnedFiles) {
if (accessMap.get(file.file_id)) {
authorizedFiles.push(file);
@@ -192,6 +199,7 @@ router.delete('/', async (req, res) => {
}
}
} else {
// No agent context, all non-owned files are unauthorized
unauthorizedFiles = nonOwnedFiles;
}
@@ -295,30 +303,42 @@ router.get('/code/download/:session_id/:fileId', async (req, res) => {
}
});
router.get('/download/:userId/:file_id', fileAccess, async (req, res) => {
router.get('/download/:userId/:file_id', async (req, res) => {
try {
const { userId, file_id } = req.params;
logger.debug(`File download requested by user ${userId}: ${file_id}`);
// Access already validated by fileAccess middleware
const file = req.fileAccess.file;
if (userId !== req.user.id) {
logger.warn(`${errorPrefix} forbidden: ${file_id}`);
return res.status(403).send('Forbidden');
}
const [file] = await getFiles({ file_id });
const errorPrefix = `File download requested by user ${userId}`;
if (!file) {
logger.warn(`${errorPrefix} not found: ${file_id}`);
return res.status(404).send('File not found');
}
if (!file.filepath.includes(userId)) {
logger.warn(`${errorPrefix} forbidden: ${file_id}`);
return res.status(403).send('Forbidden');
}
if (checkOpenAIStorage(file.source) && !file.model) {
logger.warn(`File download requested by user ${userId} has no associated model: ${file_id}`);
logger.warn(`${errorPrefix} has no associated model: ${file_id}`);
return res.status(400).send('The model used when creating this file is not available');
}
const { getDownloadStream } = getStrategyFunctions(file.source);
if (!getDownloadStream) {
logger.warn(
`File download requested by user ${userId} has no stream method implemented: ${file.source}`,
);
logger.warn(`${errorPrefix} has no stream method implemented: ${file.source}`);
return res.status(501).send('Not Implemented');
}
const setHeaders = () => {
const cleanedFilename = cleanFileName(file.filename);
res.setHeader('Content-Disposition', `attachment; filename="${cleanedFilename}"`);
res.setHeader('Content-Disposition', `attachment; filename="${file.filename}"`);
res.setHeader('Content-Type', 'application/octet-stream');
res.setHeader('X-File-Metadata', JSON.stringify(file));
};
@@ -345,17 +365,12 @@ router.get('/download/:userId/:file_id', fileAccess, async (req, res) => {
logger.debug(`File ${file_id} downloaded from OpenAI`);
passThrough.body.pipe(res);
} else {
fileStream = await getDownloadStream(req, file.filepath);
fileStream.on('error', (streamError) => {
logger.error('[DOWNLOAD ROUTE] Stream error:', streamError);
});
fileStream = getDownloadStream(file_id);
setHeaders();
fileStream.pipe(res);
}
} catch (error) {
logger.error('[DOWNLOAD ROUTE] Error downloading file:', error);
logger.error('Error downloading file:', error);
res.status(500).send('Error downloading file');
}
});
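For reference, a minimal sketch of the streaming branch above: set the response headers, attach an error handler before piping, and fall back cleanly if the stream fails mid-transfer. cleanFileName here is an illustrative stand-in for whatever the route uses to sanitize the filename:

// Sketch: pipe a file stream to the response with download headers.
const cleanFileName = (name) => name.replace(/[^\w.\- ]/g, '_');

function sendFileStream(res, file, fileStream) {
  res.setHeader('Content-Disposition', `attachment; filename="${cleanFileName(file.filename)}"`);
  res.setHeader('Content-Type', 'application/octet-stream');
  res.setHeader('X-File-Metadata', JSON.stringify(file));

  fileStream.on('error', (err) => {
    console.error('[DOWNLOAD ROUTE] Stream error:', err);
    if (!res.headersSent) {
      res.status(500).send('Error downloading file');
    } else {
      res.end();
    }
  });

  fileStream.pipe(res);
}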
@@ -390,6 +405,7 @@ router.post('/', async (req, res) => {
message = error.message;
}
// TODO: delete remote file if it exists
try {
await fs.unlink(req.file.path);
cleanup = false;
@@ -397,15 +413,13 @@ router.post('/', async (req, res) => {
logger.error('[/files] Error deleting file:', error);
}
res.status(500).json({ message });
} finally {
if (cleanup) {
try {
await fs.unlink(req.file.path);
} catch (error) {
logger.error('[/files] Error deleting file after file processing:', error);
}
} else {
logger.debug('[/files] File processing completed without cleanup');
}
if (cleanup) {
try {
await fs.unlink(req.file.path);
} catch (error) {
logger.error('[/files] Error deleting file after file processing:', error);
}
}
});
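The simplified finally block above boils down to a flag-guarded unlink of the temp upload. A small sketch of that pattern, assuming fs.promises and a multer-style temp path (the route itself responds with 500 instead of rethrowing; this sketch rethrows to stay generic):

const fs = require('fs').promises;

// Sketch: delete the temp upload exactly once, whether processing succeeded or failed.
async function withTempFileCleanup(tempPath, processUpload) {
  let cleanup = true;
  try {
    return await processUpload();
  } catch (error) {
    // On failure, try to remove the temp file immediately and skip the finally-unlink.
    try {
      await fs.unlink(tempPath);
      cleanup = false;
    } catch (unlinkError) {
      console.error('[/files] Error deleting file:', unlinkError);
    }
    throw error;
  } finally {
    if (cleanup) {
      try {
        await fs.unlink(tempPath);
      } catch (unlinkError) {
        console.error('[/files] Error deleting file after file processing:', unlinkError);
      }
    }
  }
}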

View File

@@ -2,18 +2,10 @@ const express = require('express');
const request = require('supertest');
const mongoose = require('mongoose');
const { v4: uuidv4 } = require('uuid');
const { createMethods } = require('@librechat/data-schemas');
const { MongoMemoryServer } = require('mongodb-memory-server');
const {
SystemRoles,
ResourceType,
AccessRoleIds,
PrincipalType,
} = require('librechat-data-provider');
const { createAgent } = require('~/models/Agent');
const { createFile } = require('~/models/File');
const { GLOBAL_PROJECT_NAME } = require('librechat-data-provider').Constants;
// Only mock the external dependencies that we don't want to test
// Mock dependencies
jest.mock('~/server/services/Files/process', () => ({
processDeleteRequest: jest.fn().mockResolvedValue({}),
filterFile: jest.fn(),
@@ -52,6 +44,9 @@ jest.mock('~/config', () => ({
},
}));
const { createFile } = require('~/models/File');
const { createAgent } = require('~/models/Agent');
const { getProjectByName } = require('~/models/Project');
const { processDeleteRequest } = require('~/server/services/Files/process');
// Import the router after mocks
@@ -62,49 +57,22 @@ describe('File Routes - Delete with Agent Access', () => {
let mongoServer;
let authorId;
let otherUserId;
let agentId;
let fileId;
let File;
let Agent;
let AclEntry;
let User;
let methods;
let modelsToCleanup = [];
beforeAll(async () => {
mongoServer = await MongoMemoryServer.create();
const mongoUri = mongoServer.getUri();
await mongoose.connect(mongoUri);
await mongoose.connect(mongoServer.getUri());
// Initialize all models using createModels
const { createModels } = require('@librechat/data-schemas');
const models = createModels(mongoose);
// Track which models we're adding
modelsToCleanup = Object.keys(models);
// Register models on mongoose.models so methods can access them
Object.assign(mongoose.models, models);
// Create methods with our test mongoose instance
methods = createMethods(mongoose);
// Now we can access models from the db/models
File = models.File;
Agent = models.Agent;
AclEntry = models.AclEntry;
User = models.User;
// Seed default roles using our methods
await methods.seedDefaultRoles();
// Initialize models
require('~/db/models');
app = express();
app.use(express.json());
// Mock authentication middleware
app.use((req, res, next) => {
req.user = {
id: otherUserId || 'default-user',
role: SystemRoles.USER,
};
req.user = { id: otherUserId || 'default-user' };
req.app = { locals: {} };
next();
});
@@ -113,19 +81,6 @@ describe('File Routes - Delete with Agent Access', () => {
});
afterAll(async () => {
// Clean up all collections before disconnecting
const collections = mongoose.connection.collections;
for (const key in collections) {
await collections[key].deleteMany({});
}
// Clear only the models we added
for (const modelName of modelsToCleanup) {
if (mongoose.models[modelName]) {
delete mongoose.models[modelName];
}
}
await mongoose.disconnect();
await mongoServer.stop();
});
@@ -133,40 +88,48 @@ describe('File Routes - Delete with Agent Access', () => {
beforeEach(async () => {
jest.clearAllMocks();
// Clear database - clean up all test data
await File.deleteMany({});
await Agent.deleteMany({});
await User.deleteMany({});
await AclEntry.deleteMany({});
// Don't delete AccessRole as they are seeded defaults needed for tests
// Clear database
const collections = mongoose.connection.collections;
for (const key in collections) {
await collections[key].deleteMany({});
}
// Create test data
authorId = new mongoose.Types.ObjectId();
otherUserId = new mongoose.Types.ObjectId();
authorId = new mongoose.Types.ObjectId().toString();
otherUserId = new mongoose.Types.ObjectId().toString();
fileId = uuidv4();
// Create users in database
await User.create({
_id: authorId,
username: 'author',
email: 'author@test.com',
});
await User.create({
_id: otherUserId,
username: 'other',
email: 'other@test.com',
});
// Create a file owned by the author
await createFile({
user: authorId,
file_id: fileId,
filename: 'test.txt',
filepath: '/uploads/test.txt',
bytes: 100,
filepath: `/uploads/${authorId}/${fileId}`,
bytes: 1024,
type: 'text/plain',
});
// Create an agent with the file attached
const agent = await createAgent({
id: uuidv4(),
name: 'Test Agent',
author: authorId,
model: 'gpt-4',
provider: 'openai',
isCollaborative: true,
tool_resources: {
file_search: {
file_ids: [fileId],
},
},
});
agentId = agent.id;
// Share the agent globally
const globalProject = await getProjectByName(GLOBAL_PROJECT_NAME, '_id');
if (globalProject) {
const { updateAgent } = require('~/models/Agent');
await updateAgent({ id: agentId }, { projectIds: [globalProject._id] });
}
});
describe('DELETE /files', () => {
@@ -177,8 +140,8 @@ describe('File Routes - Delete with Agent Access', () => {
user: otherUserId,
file_id: userFileId,
filename: 'user-file.txt',
filepath: '/uploads/user-file.txt',
bytes: 200,
filepath: `/uploads/${otherUserId}/${userFileId}`,
bytes: 1024,
type: 'text/plain',
});
@@ -188,7 +151,7 @@ describe('File Routes - Delete with Agent Access', () => {
files: [
{
file_id: userFileId,
filepath: '/uploads/user-file.txt',
filepath: `/uploads/${otherUserId}/${userFileId}`,
},
],
});
@@ -205,7 +168,7 @@ describe('File Routes - Delete with Agent Access', () => {
files: [
{
file_id: fileId,
filepath: '/uploads/test.txt',
filepath: `/uploads/${authorId}/${fileId}`,
},
],
});
@@ -217,39 +180,14 @@ describe('File Routes - Delete with Agent Access', () => {
});
it('should allow deleting files accessible through shared agent', async () => {
// Create an agent with the file attached
const agent = await createAgent({
id: uuidv4(),
name: 'Test Agent',
provider: 'openai',
model: 'gpt-4',
author: authorId,
tool_resources: {
file_search: {
file_ids: [fileId],
},
},
});
// Grant EDIT permission to user on the agent
const { grantPermission } = require('~/server/services/PermissionService');
await grantPermission({
principalType: PrincipalType.USER,
principalId: otherUserId,
resourceType: ResourceType.AGENT,
resourceId: agent._id,
accessRoleId: AccessRoleIds.AGENT_EDITOR,
grantedBy: authorId,
});
const response = await request(app)
.delete('/files')
.send({
agent_id: agent.id,
agent_id: agentId,
files: [
{
file_id: fileId,
filepath: '/uploads/test.txt',
filepath: `/uploads/${authorId}/${fileId}`,
},
],
});
@@ -266,44 +204,19 @@ describe('File Routes - Delete with Agent Access', () => {
user: authorId,
file_id: unattachedFileId,
filename: 'unattached.txt',
filepath: '/uploads/unattached.txt',
bytes: 300,
filepath: `/uploads/${authorId}/${unattachedFileId}`,
bytes: 1024,
type: 'text/plain',
});
// Create an agent without the unattached file
const agent = await createAgent({
id: uuidv4(),
name: 'Test Agent',
provider: 'openai',
model: 'gpt-4',
author: authorId,
tool_resources: {
file_search: {
file_ids: [fileId], // Only fileId, not unattachedFileId
},
},
});
// Grant EDIT permission to user on the agent
const { grantPermission } = require('~/server/services/PermissionService');
await grantPermission({
principalType: PrincipalType.USER,
principalId: otherUserId,
resourceType: ResourceType.AGENT,
resourceId: agent._id,
accessRoleId: AccessRoleIds.AGENT_EDITOR,
grantedBy: authorId,
});
const response = await request(app)
.delete('/files')
.send({
agent_id: agent.id,
agent_id: agentId,
files: [
{
file_id: unattachedFileId,
filepath: '/uploads/unattached.txt',
filepath: `/uploads/${authorId}/${unattachedFileId}`,
},
],
});
@@ -311,7 +224,6 @@ describe('File Routes - Delete with Agent Access', () => {
expect(response.status).toBe(403);
expect(response.body.message).toBe('You can only delete files you have access to');
expect(response.body.unauthorizedFiles).toContain(unattachedFileId);
expect(processDeleteRequest).not.toHaveBeenCalled();
});
it('should handle mixed authorized and unauthorized files', async () => {
@@ -321,8 +233,8 @@ describe('File Routes - Delete with Agent Access', () => {
user: otherUserId,
file_id: userFileId,
filename: 'user-file.txt',
filepath: '/uploads/user-file.txt',
bytes: 200,
filepath: `/uploads/${otherUserId}/${userFileId}`,
bytes: 1024,
type: 'text/plain',
});
@@ -332,87 +244,51 @@ describe('File Routes - Delete with Agent Access', () => {
user: authorId,
file_id: unauthorizedFileId,
filename: 'unauthorized.txt',
filepath: '/uploads/unauthorized.txt',
bytes: 400,
filepath: `/uploads/${authorId}/${unauthorizedFileId}`,
bytes: 1024,
type: 'text/plain',
});
// Create an agent with only fileId attached
const agent = await createAgent({
id: uuidv4(),
name: 'Test Agent',
provider: 'openai',
model: 'gpt-4',
author: authorId,
tool_resources: {
file_search: {
file_ids: [fileId],
},
},
});
// Grant EDIT permission to user on the agent
const { grantPermission } = require('~/server/services/PermissionService');
await grantPermission({
principalType: PrincipalType.USER,
principalId: otherUserId,
resourceType: ResourceType.AGENT,
resourceId: agent._id,
accessRoleId: AccessRoleIds.AGENT_EDITOR,
grantedBy: authorId,
});
const response = await request(app)
.delete('/files')
.send({
agent_id: agent.id,
agent_id: agentId,
files: [
{ file_id: userFileId, filepath: '/uploads/user-file.txt' },
{ file_id: fileId, filepath: '/uploads/test.txt' },
{ file_id: unauthorizedFileId, filepath: '/uploads/unauthorized.txt' },
{
file_id: fileId, // Authorized through agent
filepath: `/uploads/${authorId}/${fileId}`,
},
{
file_id: userFileId, // Owned by user
filepath: `/uploads/${otherUserId}/${userFileId}`,
},
{
file_id: unauthorizedFileId, // Not authorized
filepath: `/uploads/${authorId}/${unauthorizedFileId}`,
},
],
});
expect(response.status).toBe(403);
expect(response.body.message).toBe('You can only delete files you have access to');
expect(response.body.unauthorizedFiles).toContain(unauthorizedFileId);
expect(processDeleteRequest).not.toHaveBeenCalled();
expect(response.body.unauthorizedFiles).not.toContain(fileId);
expect(response.body.unauthorizedFiles).not.toContain(userFileId);
});
it('should prevent deleting files when user lacks EDIT permission on agent', async () => {
// Create an agent with the file attached
const agent = await createAgent({
id: uuidv4(),
name: 'Test Agent',
provider: 'openai',
model: 'gpt-4',
author: authorId,
tool_resources: {
file_search: {
file_ids: [fileId],
},
},
});
// Grant only VIEW permission to user on the agent
const { grantPermission } = require('~/server/services/PermissionService');
await grantPermission({
principalType: PrincipalType.USER,
principalId: otherUserId,
resourceType: ResourceType.AGENT,
resourceId: agent._id,
accessRoleId: AccessRoleIds.AGENT_VIEWER,
grantedBy: authorId,
});
it('should prevent deleting files when agent is not collaborative', async () => {
// Update the agent to be non-collaborative
const { updateAgent } = require('~/models/Agent');
await updateAgent({ id: agentId }, { isCollaborative: false });
const response = await request(app)
.delete('/files')
.send({
agent_id: agent.id,
agent_id: agentId,
files: [
{
file_id: fileId,
filepath: '/uploads/test.txt',
filepath: `/uploads/${authorId}/${fileId}`,
},
],
});
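The rewritten suite above leans on an in-memory MongoDB plus a stubbed auth middleware. A stripped-down sketch of that harness, with placeholder route and user values rather than the real models:

const express = require('express');
const request = require('supertest');
const mongoose = require('mongoose');
const { MongoMemoryServer } = require('mongodb-memory-server');

describe('example route harness', () => {
  let app;
  let mongoServer;
  const userId = new mongoose.Types.ObjectId().toString();

  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
    await mongoose.connect(mongoServer.getUri());

    app = express();
    app.use(express.json());
    // Fake auth: every request runs as the same test user.
    app.use((req, _res, next) => {
      req.user = { id: userId, role: 'USER' };
      next();
    });
    app.get('/ping', (_req, res) => res.status(200).json({ ok: true }));
  });

  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
  });

  it('responds for the authenticated test user', async () => {
    const response = await request(app).get('/ping');
    expect(response.status).toBe(200);
  });
});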

View File

@@ -1,4 +1,3 @@
const accessPermissions = require('./accessPermissions');
const assistants = require('./assistants');
const categories = require('./categories');
const tokenizer = require('./tokenizer');
@@ -29,7 +28,6 @@ const user = require('./user');
const mcp = require('./mcp');
module.exports = {
mcp,
edit,
auth,
keys,
@@ -57,5 +55,5 @@ module.exports = {
assistants,
categories,
staticRoute,
accessPermissions,
mcp,
};

View File

@@ -13,8 +13,6 @@ const { getRoleByName } = require('~/models/Role');
const router = express.Router();
const memoryPayloadLimit = express.json({ limit: '100kb' });
const checkMemoryRead = generateCheckAccess({
permissionType: PermissionTypes.MEMORIES,
permissions: [Permissions.USE, Permissions.READ],
@@ -62,7 +60,6 @@ router.get('/', checkMemoryRead, async (req, res) => {
const memoryConfig = req.app.locals?.memory;
const tokenLimit = memoryConfig?.tokenLimit;
const charLimit = memoryConfig?.charLimit || 10000;
let usagePercentage = null;
if (tokenLimit && tokenLimit > 0) {
@@ -73,7 +70,6 @@ router.get('/', checkMemoryRead, async (req, res) => {
memories: sortedMemories,
totalTokens,
tokenLimit: tokenLimit || null,
charLimit,
usagePercentage,
});
} catch (error) {
@@ -87,7 +83,7 @@ router.get('/', checkMemoryRead, async (req, res) => {
* Body: { key: string, value: string }
* Returns 201 and { created: true, memory: <createdDoc> } when successful.
*/
router.post('/', memoryPayloadLimit, checkMemoryCreate, async (req, res) => {
router.post('/', checkMemoryCreate, async (req, res) => {
const { key, value } = req.body;
if (typeof key !== 'string' || key.trim() === '') {
@@ -98,25 +94,13 @@ router.post('/', memoryPayloadLimit, checkMemoryCreate, async (req, res) => {
return res.status(400).json({ error: 'Value is required and must be a non-empty string.' });
}
const memoryConfig = req.app.locals?.memory;
const charLimit = memoryConfig?.charLimit || 10000;
if (key.length > 1000) {
return res.status(400).json({
error: `Key exceeds maximum length of 1000 characters. Current length: ${key.length} characters.`,
});
}
if (value.length > charLimit) {
return res.status(400).json({
error: `Value exceeds maximum length of ${charLimit} characters. Current length: ${value.length} characters.`,
});
}
try {
const tokenCount = Tokenizer.getTokenCount(value, 'o200k_base');
const memories = await getAllUserMemories(req.user.id);
// Check token limit
const memoryConfig = req.app.locals?.memory;
const tokenLimit = memoryConfig?.tokenLimit;
if (tokenLimit) {
@@ -191,7 +175,7 @@ router.patch('/preferences', checkMemoryOptOut, async (req, res) => {
* Body: { key?: string, value: string }
* Returns 200 and { updated: true, memory: <updatedDoc> } when successful.
*/
router.patch('/:key', memoryPayloadLimit, checkMemoryUpdate, async (req, res) => {
router.patch('/:key', checkMemoryUpdate, async (req, res) => {
const { key: urlKey } = req.params;
const { key: bodyKey, value } = req.body || {};
@@ -199,23 +183,9 @@ router.patch('/:key', memoryPayloadLimit, checkMemoryUpdate, async (req, res) =>
return res.status(400).json({ error: 'Value is required and must be a non-empty string.' });
}
// Use the key from the body if provided, otherwise use the key from the URL
const newKey = bodyKey || urlKey;
const memoryConfig = req.app.locals?.memory;
const charLimit = memoryConfig?.charLimit || 10000;
if (newKey.length > 1000) {
return res.status(400).json({
error: `Key exceeds maximum length of 1000 characters. Current length: ${newKey.length} characters.`,
});
}
if (value.length > charLimit) {
return res.status(400).json({
error: `Value exceeds maximum length of ${charLimit} characters. Current length: ${value.length} characters.`,
});
}
try {
const tokenCount = Tokenizer.getTokenCount(value, 'o200k_base');
@@ -226,6 +196,7 @@ router.patch('/:key', memoryPayloadLimit, checkMemoryUpdate, async (req, res) =>
return res.status(404).json({ error: 'Memory not found.' });
}
// If the key is changing, we need to handle it specially
if (newKey !== urlKey) {
const keyExists = memories.find((m) => m.key === newKey);
if (keyExists) {
@@ -248,6 +219,7 @@ router.patch('/:key', memoryPayloadLimit, checkMemoryUpdate, async (req, res) =>
return res.status(500).json({ error: 'Failed to delete old memory.' });
}
} else {
// Key is not changing, just update the value
const result = await setMemory({
userId: req.user.id,
key: newKey,
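The validation added above enforces a fixed key cap and a configurable character limit before the tokenizer ever runs. A compact sketch of that guard, assuming the defaults used in the route (charLimit 10000, key cap 1000):

// Sketch: validate a memory payload before persisting it.
// Returns null when valid, or an error message suitable for a 400 response.
function validateMemoryInput({ key, value, charLimit = 10000, keyLimit = 1000 }) {
  if (typeof key !== 'string' || key.trim() === '') {
    return 'Key is required and must be a non-empty string.';
  }
  if (typeof value !== 'string' || value.trim() === '') {
    return 'Value is required and must be a non-empty string.';
  }
  if (key.length > keyLimit) {
    return `Key exceeds maximum length of ${keyLimit} characters. Current length: ${key.length} characters.`;
  }
  if (value.length > charLimit) {
    return `Value exceeds maximum length of ${charLimit} characters. Current length: ${value.length} characters.`;
  }
  return null;
}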

View File

@@ -1,10 +1,7 @@
// file deepcode ignore NoRateLimitingForLogin: Rate limiting is handled by the `loginLimiter` middleware
const express = require('express');
const passport = require('passport');
const { isEnabled } = require('@librechat/api');
const { randomState } = require('openid-client');
const { logger } = require('@librechat/data-schemas');
const { ErrorTypes } = require('librechat-data-provider');
const {
checkBan,
logHeaders,
@@ -12,8 +9,9 @@ const {
setBalanceConfig,
checkDomainAllowed,
} = require('~/server/middleware');
const { syncUserEntraGroupMemberships } = require('~/server/services/PermissionService');
const { setAuthTokens, setOpenIDAuthTokens } = require('~/server/services/AuthService');
const { isEnabled } = require('~/server/utils');
const { logger } = require('~/config');
const router = express.Router();
@@ -37,7 +35,6 @@ const oauthHandler = async (req, res) => {
req.user.provider == 'openid' &&
isEnabled(process.env.OPENID_REUSE_TOKENS) === true
) {
await syncUserEntraGroupMemberships(req.user, req.user.tokenset.access_token);
setOpenIDAuthTokens(req.user.tokenset, res);
} else {
await setAuthTokens(req.user._id, res);
@@ -49,13 +46,13 @@ const oauthHandler = async (req, res) => {
};
router.get('/error', (req, res) => {
/** A single error message is pushed by passport when authentication fails. */
const errorMessage = req.session?.messages?.pop() || 'Unknown error';
// A single error message is pushed by passport when authentication fails.
logger.error('Error in OAuth authentication:', {
message: errorMessage,
message: req.session?.messages?.pop() || 'Unknown error',
});
res.redirect(`${domains.client}/login?redirect=false&error=${ErrorTypes.AUTH_FAILED}`);
// Redirect to login page with auth_failed parameter to prevent infinite redirect loops
res.redirect(`${domains.client}/login?redirect=false`);
});
/**
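The reworked error route pops the single passport failure message, logs it, and redirects with an explicit error code so the login page does not loop. A minimal Express sketch of that handler; domains.client and ErrorTypes.AUTH_FAILED are illustrative stand-ins for the real config object and enum:

const express = require('express');

// Illustrative stand-ins for the real config/enum.
const domains = { client: process.env.DOMAIN_CLIENT || 'http://localhost:3080' };
const ErrorTypes = { AUTH_FAILED: 'auth_failed' };

const router = express.Router();

router.get('/error', (req, res) => {
  /** A single error message is pushed by passport when authentication fails. */
  const errorMessage = req.session?.messages?.pop() || 'Unknown error';
  console.error('Error in OAuth authentication:', { message: errorMessage });
  // The error query parameter lets the client stop retrying instead of redirect-looping.
  res.redirect(`${domains.client}/login?redirect=false&error=${ErrorTypes.AUTH_FAILED}`);
});

module.exports = router;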

View File

@@ -1,45 +1,22 @@
const express = require('express');
const { logger } = require('@librechat/data-schemas');
const { generateCheckAccess } = require('@librechat/api');
const { Permissions, SystemRoles, PermissionTypes } = require('librechat-data-provider');
const {
generateCheckAccess,
markPublicPromptGroups,
buildPromptGroupFilter,
formatPromptGroupsResponse,
createEmptyPromptGroupsResponse,
filterAccessibleIdsBySharedLogic,
} = require('@librechat/api');
const {
Permissions,
SystemRoles,
ResourceType,
AccessRoleIds,
PrincipalType,
PermissionBits,
PermissionTypes,
} = require('librechat-data-provider');
const {
getListPromptGroupsByAccess,
makePromptProduction,
getPrompt,
getPrompts,
savePrompt,
deletePrompt,
getPromptGroup,
getPromptGroups,
updatePromptGroup,
deletePromptGroup,
createPromptGroup,
getPromptGroup,
deletePrompt,
getPrompts,
savePrompt,
getPrompt,
getAllPromptGroups,
// updatePromptLabels,
makePromptProduction,
} = require('~/models/Prompt');
const {
canAccessPromptGroupResource,
canAccessPromptViaGroup,
requireJwtAuth,
} = require('~/server/middleware');
const {
findPubliclyAccessibleResources,
getEffectivePermissions,
findAccessibleResources,
grantPermission,
} = require('~/server/services/PermissionService');
const { requireJwtAuth } = require('~/server/middleware');
const { getRoleByName } = require('~/models/Role');
const router = express.Router();
@@ -71,78 +48,43 @@ router.use(checkPromptAccess);
* Route to get single prompt group by its ID
* GET /groups/:groupId
*/
router.get(
'/groups/:groupId',
canAccessPromptGroupResource({
requiredPermission: PermissionBits.VIEW,
}),
async (req, res) => {
const { groupId } = req.params;
router.get('/groups/:groupId', async (req, res) => {
let groupId = req.params.groupId;
const author = req.user.id;
try {
const group = await getPromptGroup({ _id: groupId });
const query = {
_id: groupId,
$or: [{ projectIds: { $exists: true, $ne: [], $not: { $size: 0 } } }, { author }],
};
if (!group) {
return res.status(404).send({ message: 'Prompt group not found' });
}
if (req.user.role === SystemRoles.ADMIN) {
delete query.$or;
}
res.status(200).send(group);
} catch (error) {
logger.error('Error getting prompt group', error);
res.status(500).send({ message: 'Error getting prompt group' });
try {
const group = await getPromptGroup(query);
if (!group) {
return res.status(404).send({ message: 'Prompt group not found' });
}
},
);
res.status(200).send(group);
} catch (error) {
logger.error('Error getting prompt group', error);
res.status(500).send({ message: 'Error getting prompt group' });
}
});
/**
* Route to fetch all prompt groups (ACL-aware)
* GET /all
* Route to fetch all prompt groups
* GET /groups
*/
router.get('/all', async (req, res) => {
try {
const userId = req.user.id;
const { name, category, ...otherFilters } = req.query;
const { filter, searchShared, searchSharedOnly } = buildPromptGroupFilter({
name,
category,
...otherFilters,
const groups = await getAllPromptGroups(req, {
author: req.user._id,
});
let accessibleIds = await findAccessibleResources({
userId,
role: req.user.role,
resourceType: ResourceType.PROMPTGROUP,
requiredPermissions: PermissionBits.VIEW,
});
const publiclyAccessibleIds = await findPubliclyAccessibleResources({
resourceType: ResourceType.PROMPTGROUP,
requiredPermissions: PermissionBits.VIEW,
});
const filteredAccessibleIds = await filterAccessibleIdsBySharedLogic({
accessibleIds,
searchShared,
searchSharedOnly,
publicPromptGroupIds: publiclyAccessibleIds,
});
const result = await getListPromptGroupsByAccess({
accessibleIds: filteredAccessibleIds,
otherParams: filter,
});
if (!result) {
return res.status(200).send([]);
}
const { data: promptGroups = [] } = result;
if (!promptGroups.length) {
return res.status(200).send([]);
}
const groupsWithPublicFlag = markPublicPromptGroups(promptGroups, publiclyAccessibleIds);
res.status(200).send(groupsWithPublicFlag);
res.status(200).send(groups);
} catch (error) {
logger.error(error);
res.status(500).send({ error: 'Error getting prompt groups' });
@@ -150,72 +92,16 @@ router.get('/all', async (req, res) => {
});
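The /all handler above composes several permission queries before fetching groups. A condensed sketch of that flow, simplified to merge explicit grants with public access in one pass; the permission-service functions are injected because their exact signatures live in @librechat/api and PermissionService, and the resourceType / permission-bit values here are illustrative:

// Sketch: resolve which prompt-group IDs a user may view, then flag the public ones.
async function listViewablePromptGroups({
  user,
  filter,
  findAccessibleResources, // (args) => Promise<ObjectId[]>
  findPubliclyAccessibleResources, // (args) => Promise<ObjectId[]>
  getListPromptGroupsByAccess, // ({ accessibleIds, otherParams }) => Promise<{ data: [] }>
}) {
  const accessibleIds = await findAccessibleResources({
    userId: user.id,
    role: user.role,
    resourceType: 'promptGroup', // illustrative value
    requiredPermissions: 1, // VIEW bit in this sketch
  });
  const publicIds = await findPubliclyAccessibleResources({
    resourceType: 'promptGroup',
    requiredPermissions: 1,
  });

  const result = await getListPromptGroupsByAccess({
    accessibleIds: [...new Set([...accessibleIds, ...publicIds].map(String))],
    otherParams: filter,
  });

  const publicSet = new Set(publicIds.map(String));
  return (result?.data ?? []).map((group) => ({
    ...group,
    isPublic: publicSet.has(String(group._id)),
  }));
}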
/**
* Route to fetch paginated prompt groups with filters (ACL-aware)
* Route to fetch paginated prompt groups with filters
* GET /groups
*/
router.get('/groups', async (req, res) => {
try {
const userId = req.user.id;
const { pageSize, pageNumber, limit, cursor, name, category, ...otherFilters } = req.query;
const { filter, searchShared, searchSharedOnly } = buildPromptGroupFilter({
name,
category,
...otherFilters,
});
let actualLimit = limit;
let actualCursor = cursor;
if (pageSize && !limit) {
actualLimit = parseInt(pageSize, 10);
}
let accessibleIds = await findAccessibleResources({
userId,
role: req.user.role,
resourceType: ResourceType.PROMPTGROUP,
requiredPermissions: PermissionBits.VIEW,
});
const publiclyAccessibleIds = await findPubliclyAccessibleResources({
resourceType: ResourceType.PROMPTGROUP,
requiredPermissions: PermissionBits.VIEW,
});
const filteredAccessibleIds = await filterAccessibleIdsBySharedLogic({
accessibleIds,
searchShared,
searchSharedOnly,
publicPromptGroupIds: publiclyAccessibleIds,
});
const result = await getListPromptGroupsByAccess({
accessibleIds: filteredAccessibleIds,
otherParams: filter,
limit: actualLimit,
after: actualCursor,
});
if (!result) {
const emptyResponse = createEmptyPromptGroupsResponse({ pageNumber, pageSize, actualLimit });
return res.status(200).send(emptyResponse);
}
const { data: promptGroups = [], has_more = false, after = null } = result;
const groupsWithPublicFlag = markPublicPromptGroups(promptGroups, publiclyAccessibleIds);
const response = formatPromptGroupsResponse({
promptGroups: groupsWithPublicFlag,
pageNumber,
pageSize,
actualLimit,
hasMore: has_more,
after,
});
res.status(200).send(response);
const filter = req.query;
/* Note: The aggregation requires an ObjectId */
filter.author = req.user._id;
const groups = await getPromptGroups(req, filter);
res.status(200).send(groups);
} catch (error) {
logger.error(error);
res.status(500).send({ error: 'Error getting prompt groups' });
@@ -223,17 +109,16 @@ router.get('/groups', async (req, res) => {
});
/**
* Creates a new prompt group with initial prompt
* Updates or creates a prompt + promptGroup
* @param {object} req
* @param {TCreatePrompt} req.body
* @param {Express.Response} res
*/
const createNewPromptGroup = async (req, res) => {
const createPrompt = async (req, res) => {
try {
const { prompt, group } = req.body;
if (!prompt || !group || !group.name) {
return res.status(400).send({ error: 'Prompt and group name are required' });
if (!prompt) {
return res.status(400).send({ error: 'Prompt is required' });
}
const saveData = {
@@ -243,80 +128,21 @@ const createNewPromptGroup = async (req, res) => {
authorName: req.user.name,
};
const result = await createPromptGroup(saveData);
if (result.prompt && result.prompt._id && result.prompt.groupId) {
try {
await grantPermission({
principalType: PrincipalType.USER,
principalId: req.user.id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: result.prompt.groupId,
accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
grantedBy: req.user.id,
});
logger.debug(
`[createPromptGroup] Granted owner permissions to user ${req.user.id} for promptGroup ${result.prompt.groupId}`,
);
} catch (permissionError) {
logger.error(
`[createPromptGroup] Failed to grant owner permissions for promptGroup ${result.prompt.groupId}:`,
permissionError,
);
}
/** @type {TCreatePromptResponse} */
let result;
if (group && group.name) {
result = await createPromptGroup(saveData);
} else {
result = await savePrompt(saveData);
}
res.status(200).send(result);
} catch (error) {
logger.error(error);
res.status(500).send({ error: 'Error creating prompt group' });
res.status(500).send({ error: 'Error saving prompt' });
}
};
/**
* Adds a new prompt to an existing prompt group
* @param {object} req
* @param {TCreatePrompt} req.body
* @param {Express.Response} res
*/
const addPromptToGroup = async (req, res) => {
try {
const { groupId } = req.params;
const { prompt } = req.body;
if (!prompt) {
return res.status(400).send({ error: 'Prompt is required' });
}
// Ensure the prompt is associated with the correct group
prompt.groupId = groupId;
const saveData = {
prompt,
author: req.user.id,
authorName: req.user.name,
};
const result = await savePrompt(saveData);
res.status(200).send(result);
} catch (error) {
logger.error(error);
res.status(500).send({ error: 'Error adding prompt to group' });
}
};
// Create new prompt group (requires CREATE permission)
router.post('/', checkPromptCreate, createNewPromptGroup);
// Add prompt to existing group (requires EDIT permission on the group)
router.post(
'/groups/:groupId/prompts',
checkPromptAccess,
canAccessPromptGroupResource({
requiredPermission: PermissionBits.EDIT,
}),
addPromptToGroup,
);
router.post('/', checkPromptCreate, createPrompt);
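createNewPromptGroup above couples group creation with an owner grant on the new promptGroup resource, and a failed grant is logged rather than rolling back the group. A hedged sketch of that two-step flow, with createPromptGroup and grantPermission injected instead of imported and the principal/role identifiers shown as illustrative strings:

// Sketch: create a prompt group, then grant the author owner-level access to it.
async function createGroupWithOwnerGrant({ user, prompt, group, createPromptGroup, grantPermission }) {
  if (!prompt || !group || !group.name) {
    throw new Error('Prompt and group name are required');
  }

  const result = await createPromptGroup({
    prompt,
    group,
    author: user.id,
    authorName: user.name,
  });

  const groupId = result?.prompt?.groupId;
  if (groupId) {
    try {
      await grantPermission({
        principalType: 'user', // illustrative
        principalId: user.id,
        resourceType: 'promptGroup', // illustrative
        resourceId: groupId,
        accessRoleId: 'promptGroup_owner', // illustrative role id
        grantedBy: user.id,
      });
    } catch (permissionError) {
      console.error(`Failed to grant owner permissions for promptGroup ${groupId}:`, permissionError);
    }
  }
  return result;
}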
/**
* Updates a prompt group
@@ -342,74 +168,35 @@ const patchPromptGroup = async (req, res) => {
}
};
router.patch(
'/groups/:groupId',
checkGlobalPromptShare,
canAccessPromptGroupResource({
requiredPermission: PermissionBits.EDIT,
}),
patchPromptGroup,
);
router.patch('/groups/:groupId', checkGlobalPromptShare, patchPromptGroup);
router.patch(
'/:promptId/tags/production',
checkPromptCreate,
canAccessPromptViaGroup({
requiredPermission: PermissionBits.EDIT,
resourceIdParam: 'promptId',
}),
async (req, res) => {
try {
const { promptId } = req.params;
const result = await makePromptProduction(promptId);
res.status(200).send(result);
} catch (error) {
logger.error(error);
res.status(500).send({ error: 'Error updating prompt production' });
}
},
);
router.get(
'/:promptId',
canAccessPromptViaGroup({
requiredPermission: PermissionBits.VIEW,
resourceIdParam: 'promptId',
}),
async (req, res) => {
router.patch('/:promptId/tags/production', checkPromptCreate, async (req, res) => {
try {
const { promptId } = req.params;
const prompt = await getPrompt({ _id: promptId });
res.status(200).send(prompt);
},
);
const result = await makePromptProduction(promptId);
res.status(200).send(result);
} catch (error) {
logger.error(error);
res.status(500).send({ error: 'Error updating prompt production' });
}
});
router.get('/:promptId', async (req, res) => {
const { promptId } = req.params;
const author = req.user.id;
const query = { _id: promptId, author };
if (req.user.role === SystemRoles.ADMIN) {
delete query.author;
}
const prompt = await getPrompt(query);
res.status(200).send(prompt);
});
router.get('/', async (req, res) => {
try {
const author = req.user.id;
const { groupId } = req.query;
// If requesting prompts for a specific group, check permissions
if (groupId) {
const permissions = await getEffectivePermissions({
userId: req.user.id,
role: req.user.role,
resourceType: ResourceType.PROMPTGROUP,
resourceId: groupId,
});
if (!(permissions & PermissionBits.VIEW)) {
return res
.status(403)
.send({ error: 'Insufficient permissions to view prompts in this group' });
}
// If user has access, fetch all prompts in the group (not just their own)
const prompts = await getPrompts({ groupId });
return res.status(200).send(prompts);
}
// If no groupId, return user's own prompts
const query = { author };
const query = { groupId, author };
if (req.user.role === SystemRoles.ADMIN) {
delete query.author;
}
@@ -453,8 +240,7 @@ const deletePromptController = async (req, res) => {
const deletePromptGroupController = async (req, res) => {
try {
const { groupId: _id } = req.params;
// Don't pass author - permissions are now checked by middleware
const message = await deletePromptGroup({ _id, role: req.user.role });
const message = await deletePromptGroup({ _id, author: req.user.id, role: req.user.role });
res.send(message);
} catch (error) {
logger.error('Error deleting prompt group', error);
@@ -462,22 +248,7 @@ const deletePromptGroupController = async (req, res) => {
}
};
router.delete(
'/:promptId',
checkPromptCreate,
canAccessPromptViaGroup({
requiredPermission: PermissionBits.DELETE,
resourceIdParam: 'promptId',
}),
deletePromptController,
);
router.delete(
'/groups/:groupId',
checkPromptCreate,
canAccessPromptGroupResource({
requiredPermission: PermissionBits.DELETE,
}),
deletePromptGroupController,
);
router.delete('/:promptId', checkPromptCreate, deletePromptController);
router.delete('/groups/:groupId', checkPromptCreate, deletePromptGroupController);
module.exports = router;
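The removed routes above gated each verb with middleware factories (canAccessPromptViaGroup, canAccessPromptGroupResource) keyed by a permission bit. As a generic illustration of that pattern only, here is a small Express middleware factory; checkPermission and the bit values are placeholders, not the project's actual service:

const express = require('express');

// Sketch: build a middleware that rejects requests lacking a required permission bit.
// checkPermission(user, resourceId) should resolve to a numeric permission bitmask.
function requirePermission({ requiredPermission, resourceIdParam, checkPermission }) {
  return async (req, res, next) => {
    try {
      const resourceId = req.params[resourceIdParam];
      const permissions = await checkPermission(req.user, resourceId);
      if ((permissions & requiredPermission) !== requiredPermission) {
        return res.status(403).json({ error: 'Forbidden', message: 'Insufficient permissions' });
      }
      next();
    } catch (error) {
      next(error);
    }
  };
}

// Usage sketch: DELETE requires a hypothetical DELETE bit (8 here), resolved per promptId.
const router = express.Router();
router.delete(
  '/:promptId',
  requirePermission({
    requiredPermission: 8,
    resourceIdParam: 'promptId',
    checkPermission: async () => 0b1111, // stub that grants every bit
  }),
  (req, res) => res.send({ prompt: 'Prompt deleted successfully' }),
);

module.exports = router;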

View File

@@ -1,614 +0,0 @@
const express = require('express');
const request = require('supertest');
const mongoose = require('mongoose');
const { ObjectId } = require('mongodb');
const { MongoMemoryServer } = require('mongodb-memory-server');
const {
SystemRoles,
ResourceType,
AccessRoleIds,
PrincipalType,
PermissionBits,
} = require('librechat-data-provider');
// Mock modules before importing
jest.mock('~/server/services/Config', () => ({
getCachedTools: jest.fn().mockResolvedValue({}),
getCustomConfig: jest.fn(),
}));
jest.mock('~/models/Role', () => ({
getRoleByName: jest.fn(),
}));
jest.mock('~/server/middleware', () => ({
requireJwtAuth: (req, res, next) => next(),
canAccessPromptViaGroup: jest.requireActual('~/server/middleware').canAccessPromptViaGroup,
canAccessPromptGroupResource:
jest.requireActual('~/server/middleware').canAccessPromptGroupResource,
}));
let app;
let mongoServer;
let promptRoutes;
let Prompt, PromptGroup, AclEntry, AccessRole, User;
let testUsers, testRoles;
let grantPermission;
// Helper function to set user in middleware
function setTestUser(app, user) {
app.use((req, res, next) => {
req.user = {
...(user.toObject ? user.toObject() : user),
id: user.id || user._id.toString(),
_id: user._id,
name: user.name,
role: user.role,
};
if (user.role === SystemRoles.ADMIN) {
console.log('Setting admin user with role:', req.user.role);
}
next();
});
}
beforeAll(async () => {
mongoServer = await MongoMemoryServer.create();
const mongoUri = mongoServer.getUri();
await mongoose.connect(mongoUri);
// Initialize models
const dbModels = require('~/db/models');
Prompt = dbModels.Prompt;
PromptGroup = dbModels.PromptGroup;
AclEntry = dbModels.AclEntry;
AccessRole = dbModels.AccessRole;
User = dbModels.User;
// Import permission service
const permissionService = require('~/server/services/PermissionService');
grantPermission = permissionService.grantPermission;
// Create test data
await setupTestData();
// Setup Express app
app = express();
app.use(express.json());
// Mock authentication middleware - default to owner
setTestUser(app, testUsers.owner);
// Import routes after mocks are set up
promptRoutes = require('./prompts');
app.use('/api/prompts', promptRoutes);
});
afterAll(async () => {
await mongoose.disconnect();
await mongoServer.stop();
jest.clearAllMocks();
});
async function setupTestData() {
// Create access roles for promptGroups
testRoles = {
viewer: await AccessRole.create({
accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
name: 'Viewer',
resourceType: ResourceType.PROMPTGROUP,
permBits: PermissionBits.VIEW,
}),
editor: await AccessRole.create({
accessRoleId: AccessRoleIds.PROMPTGROUP_EDITOR,
name: 'Editor',
resourceType: ResourceType.PROMPTGROUP,
permBits: PermissionBits.VIEW | PermissionBits.EDIT,
}),
owner: await AccessRole.create({
accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
name: 'Owner',
resourceType: ResourceType.PROMPTGROUP,
permBits:
PermissionBits.VIEW | PermissionBits.EDIT | PermissionBits.DELETE | PermissionBits.SHARE,
}),
};
// Create test users
testUsers = {
owner: await User.create({
id: new ObjectId().toString(),
_id: new ObjectId(),
name: 'Prompt Owner',
email: 'owner@example.com',
role: SystemRoles.USER,
}),
viewer: await User.create({
id: new ObjectId().toString(),
_id: new ObjectId(),
name: 'Prompt Viewer',
email: 'viewer@example.com',
role: SystemRoles.USER,
}),
editor: await User.create({
id: new ObjectId().toString(),
_id: new ObjectId(),
name: 'Prompt Editor',
email: 'editor@example.com',
role: SystemRoles.USER,
}),
noAccess: await User.create({
id: new ObjectId().toString(),
_id: new ObjectId(),
name: 'No Access',
email: 'noaccess@example.com',
role: SystemRoles.USER,
}),
admin: await User.create({
id: new ObjectId().toString(),
_id: new ObjectId(),
name: 'Admin',
email: 'admin@example.com',
role: SystemRoles.ADMIN,
}),
};
// Mock getRoleByName
const { getRoleByName } = require('~/models/Role');
getRoleByName.mockImplementation((roleName) => {
switch (roleName) {
case SystemRoles.USER:
return { permissions: { PROMPTS: { USE: true, CREATE: true } } };
case SystemRoles.ADMIN:
return { permissions: { PROMPTS: { USE: true, CREATE: true, SHARED_GLOBAL: true } } };
default:
return null;
}
});
}
describe('Prompt Routes - ACL Permissions', () => {
let consoleErrorSpy;
beforeEach(() => {
consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation();
});
afterEach(() => {
consoleErrorSpy.mockRestore();
});
// Simple test to verify route is loaded
it('should have routes loaded', async () => {
// This should at least not crash
const response = await request(app).get('/api/prompts/test-404');
console.log('Test 404 response status:', response.status);
console.log('Test 404 response body:', response.body);
// We expect a 401 or 404, not 500
expect(response.status).not.toBe(500);
});
describe('POST /api/prompts - Create Prompt', () => {
afterEach(async () => {
await Prompt.deleteMany({});
await PromptGroup.deleteMany({});
await AclEntry.deleteMany({});
});
it('should create a prompt and grant owner permissions', async () => {
const promptData = {
prompt: {
prompt: 'Test prompt content',
type: 'text',
},
group: {
name: 'Test Prompt Group',
},
};
const response = await request(app).post('/api/prompts').send(promptData);
if (response.status !== 200) {
console.log('POST /api/prompts error status:', response.status);
console.log('POST /api/prompts error body:', response.body);
console.log('Console errors:', consoleErrorSpy.mock.calls);
}
expect(response.status).toBe(200);
expect(response.body.prompt).toBeDefined();
expect(response.body.prompt.prompt).toBe(promptData.prompt.prompt);
// Check ACL entry was created
const aclEntry = await AclEntry.findOne({
resourceType: ResourceType.PROMPTGROUP,
resourceId: response.body.prompt.groupId,
principalType: PrincipalType.USER,
principalId: testUsers.owner._id,
});
expect(aclEntry).toBeTruthy();
expect(aclEntry.roleId.toString()).toBe(testRoles.owner._id.toString());
});
it('should create a prompt group with prompt and grant owner permissions', async () => {
const promptData = {
prompt: {
prompt: 'Group prompt content',
// Remove 'name' from prompt - it's not in the schema
},
group: {
name: 'Test Group',
category: 'testing',
},
};
const response = await request(app).post('/api/prompts').send(promptData).expect(200);
expect(response.body.prompt).toBeDefined();
expect(response.body.group).toBeDefined();
expect(response.body.group.name).toBe(promptData.group.name);
// Check ACL entry was created for the promptGroup
const aclEntry = await AclEntry.findOne({
resourceType: ResourceType.PROMPTGROUP,
resourceId: response.body.group._id,
principalType: PrincipalType.USER,
principalId: testUsers.owner._id,
});
expect(aclEntry).toBeTruthy();
});
});
describe('GET /api/prompts/:promptId - Get Prompt', () => {
let testPrompt;
let testGroup;
beforeEach(async () => {
// Create a prompt group first
testGroup = await PromptGroup.create({
name: 'Test Group',
category: 'testing',
author: testUsers.owner._id,
authorName: testUsers.owner.name,
productionId: new ObjectId(),
});
// Create a prompt
testPrompt = await Prompt.create({
prompt: 'Test prompt for retrieval',
name: 'Get Test',
author: testUsers.owner._id,
type: 'text',
groupId: testGroup._id,
});
});
afterEach(async () => {
await Prompt.deleteMany({});
await PromptGroup.deleteMany({});
await AclEntry.deleteMany({});
});
it('should retrieve prompt when user has view permissions', async () => {
// Grant view permissions on the promptGroup
await grantPermission({
principalType: PrincipalType.USER,
principalId: testUsers.owner._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: testGroup._id,
accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
grantedBy: testUsers.owner._id,
});
const response = await request(app).get(`/api/prompts/${testPrompt._id}`);
expect(response.status).toBe(200);
expect(response.body._id).toBe(testPrompt._id.toString());
expect(response.body.prompt).toBe(testPrompt.prompt);
});
it('should deny access when user has no permissions', async () => {
// Change the user to one without access
setTestUser(app, testUsers.noAccess);
const response = await request(app).get(`/api/prompts/${testPrompt._id}`).expect(403);
// Verify error response
expect(response.body.error).toBe('Forbidden');
expect(response.body.message).toBe('Insufficient permissions to access this promptGroup');
});
it('should allow admin access without explicit permissions', async () => {
// First, reset the app to remove previous middleware
app = express();
app.use(express.json());
// Set admin user BEFORE adding routes
app.use((req, res, next) => {
req.user = {
...testUsers.admin.toObject(),
id: testUsers.admin._id.toString(),
_id: testUsers.admin._id,
name: testUsers.admin.name,
role: testUsers.admin.role,
};
next();
});
// Now add the routes
const promptRoutes = require('./prompts');
app.use('/api/prompts', promptRoutes);
console.log('Admin user:', testUsers.admin);
console.log('Admin role:', testUsers.admin.role);
console.log('SystemRoles.ADMIN:', SystemRoles.ADMIN);
const response = await request(app).get(`/api/prompts/${testPrompt._id}`).expect(200);
expect(response.body._id).toBe(testPrompt._id.toString());
});
});
describe('DELETE /api/prompts/:promptId - Delete Prompt', () => {
let testPrompt;
let testGroup;
beforeEach(async () => {
// Create group with prompt
testGroup = await PromptGroup.create({
name: 'Delete Test Group',
category: 'testing',
author: testUsers.owner._id,
authorName: testUsers.owner.name,
productionId: new ObjectId(),
});
testPrompt = await Prompt.create({
prompt: 'Test prompt for deletion',
name: 'Delete Test',
author: testUsers.owner._id,
type: 'text',
groupId: testGroup._id,
});
// Add prompt to group
testGroup.productionId = testPrompt._id;
testGroup.promptIds = [testPrompt._id];
await testGroup.save();
// Grant owner permissions on the promptGroup
await grantPermission({
principalType: PrincipalType.USER,
principalId: testUsers.owner._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: testGroup._id,
accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
grantedBy: testUsers.owner._id,
});
});
afterEach(async () => {
await Prompt.deleteMany({});
await PromptGroup.deleteMany({});
await AclEntry.deleteMany({});
});
it('should delete prompt when user has delete permissions', async () => {
const response = await request(app)
.delete(`/api/prompts/${testPrompt._id}`)
.query({ groupId: testGroup._id.toString() })
.expect(200);
expect(response.body.prompt).toBe('Prompt deleted successfully');
// Verify prompt was deleted
const deletedPrompt = await Prompt.findById(testPrompt._id);
expect(deletedPrompt).toBeNull();
// Verify ACL entries were removed
const aclEntries = await AclEntry.find({
resourceType: ResourceType.PROMPTGROUP,
resourceId: testGroup._id,
});
expect(aclEntries).toHaveLength(0);
});
it('should deny deletion when user lacks delete permissions', async () => {
// Create a prompt as a different user (not the one trying to delete)
const authorPrompt = await Prompt.create({
prompt: 'Test prompt by another user',
name: 'Another User Prompt',
author: testUsers.editor._id, // Different author
type: 'text',
groupId: testGroup._id,
});
// Grant only viewer permissions to viewer user on the promptGroup
await grantPermission({
principalType: PrincipalType.USER,
principalId: testUsers.viewer._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: testGroup._id,
accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
grantedBy: testUsers.editor._id,
});
// Recreate app with viewer user
app = express();
app.use(express.json());
app.use((req, res, next) => {
req.user = {
...testUsers.viewer.toObject(),
id: testUsers.viewer._id.toString(),
_id: testUsers.viewer._id,
name: testUsers.viewer.name,
role: testUsers.viewer.role,
};
next();
});
const promptRoutes = require('./prompts');
app.use('/api/prompts', promptRoutes);
await request(app)
.delete(`/api/prompts/${authorPrompt._id}`)
.query({ groupId: testGroup._id.toString() })
.expect(403);
// Verify prompt still exists
const prompt = await Prompt.findById(authorPrompt._id);
expect(prompt).toBeTruthy();
});
});
describe('PATCH /api/prompts/:promptId/tags/production - Make Production', () => {
let testPrompt;
let testGroup;
beforeEach(async () => {
// Create group
testGroup = await PromptGroup.create({
name: 'Production Test Group',
category: 'testing',
author: testUsers.owner._id,
authorName: testUsers.owner.name,
productionId: new ObjectId(),
});
testPrompt = await Prompt.create({
prompt: 'Test prompt for production',
name: 'Production Test',
author: testUsers.owner._id,
type: 'text',
groupId: testGroup._id,
});
});
afterEach(async () => {
await Prompt.deleteMany({});
await PromptGroup.deleteMany({});
await AclEntry.deleteMany({});
});
it('should make prompt production when user has edit permissions', async () => {
// Grant edit permissions on the promptGroup
await grantPermission({
principalType: PrincipalType.USER,
principalId: testUsers.owner._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: testGroup._id,
accessRoleId: AccessRoleIds.PROMPTGROUP_EDITOR,
grantedBy: testUsers.owner._id,
});
// Recreate app to ensure fresh middleware
app = express();
app.use(express.json());
app.use((req, res, next) => {
req.user = {
...testUsers.owner.toObject(),
id: testUsers.owner._id.toString(),
_id: testUsers.owner._id,
name: testUsers.owner.name,
role: testUsers.owner.role,
};
next();
});
const promptRoutes = require('./prompts');
app.use('/api/prompts', promptRoutes);
const response = await request(app)
.patch(`/api/prompts/${testPrompt._id}/tags/production`)
.expect(200);
expect(response.body.message).toBe('Prompt production made successfully');
// Verify the group was updated
const updatedGroup = await PromptGroup.findById(testGroup._id);
expect(updatedGroup.productionId.toString()).toBe(testPrompt._id.toString());
});
it('should deny making production when user lacks edit permissions', async () => {
// Grant only view permissions to viewer on the promptGroup
await grantPermission({
principalType: PrincipalType.USER,
principalId: testUsers.viewer._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: testGroup._id,
accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
grantedBy: testUsers.owner._id,
});
// Recreate app with viewer user
app = express();
app.use(express.json());
app.use((req, res, next) => {
req.user = {
...testUsers.viewer.toObject(),
id: testUsers.viewer._id.toString(),
_id: testUsers.viewer._id,
name: testUsers.viewer.name,
role: testUsers.viewer.role,
};
next();
});
const promptRoutes = require('./prompts');
app.use('/api/prompts', promptRoutes);
await request(app).patch(`/api/prompts/${testPrompt._id}/tags/production`).expect(403);
// Verify prompt hasn't changed
const unchangedGroup = await PromptGroup.findById(testGroup._id);
expect(unchangedGroup.productionId.toString()).not.toBe(testPrompt._id.toString());
});
});
describe('Public Access', () => {
let publicPrompt;
let publicGroup;
beforeEach(async () => {
// Create a prompt group
publicGroup = await PromptGroup.create({
name: 'Public Test Group',
category: 'testing',
author: testUsers.owner._id,
authorName: testUsers.owner.name,
productionId: new ObjectId(),
});
// Create a public prompt
publicPrompt = await Prompt.create({
prompt: 'Public prompt content',
name: 'Public Test',
author: testUsers.owner._id,
type: 'text',
groupId: publicGroup._id,
});
// Grant public viewer access on the promptGroup
await grantPermission({
principalType: PrincipalType.PUBLIC,
principalId: null,
resourceType: ResourceType.PROMPTGROUP,
resourceId: publicGroup._id,
accessRoleId: AccessRoleIds.PROMPTGROUP_VIEWER,
grantedBy: testUsers.owner._id,
});
});
afterEach(async () => {
await Prompt.deleteMany({});
await PromptGroup.deleteMany({});
await AclEntry.deleteMany({});
});
it('should allow any user to view public prompts', async () => {
// Change user to someone without explicit permissions
setTestUser(app, testUsers.noAccess);
const response = await request(app).get(`/api/prompts/${publicPrompt._id}`).expect(200);
expect(response.body._id).toBe(publicPrompt._id.toString());
});
});
});
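Several tests above rebuild the Express app just to swap the authenticated user. A small helper in the same style could cut that repetition; this is a sketch only, and buildAppForUser is not part of the suite:

const express = require('express');

// Sketch: build a fresh app whose auth middleware impersonates the given user.
function buildAppForUser(user, mountRoutes) {
  const app = express();
  app.use(express.json());
  app.use((req, _res, next) => {
    req.user = {
      ...(user.toObject ? user.toObject() : user),
      id: user.id || user._id.toString(),
      _id: user._id,
      name: user.name,
      role: user.role,
    };
    next();
  });
  mountRoutes(app);
  return app;
}

// Usage sketch:
// const app = buildAppForUser(testUsers.viewer, (a) => a.use('/api/prompts', require('./prompts')));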

View File

@@ -1,13 +1,11 @@
const express = require('express');
const {
SystemRoles,
roleDefaults,
PermissionTypes,
agentPermissionsSchema,
promptPermissionsSchema,
memoryPermissionsSchema,
marketplacePermissionsSchema,
peoplePickerPermissionsSchema,
agentPermissionsSchema,
PermissionTypes,
roleDefaults,
SystemRoles,
} = require('librechat-data-provider');
const { checkAdmin, requireJwtAuth } = require('~/server/middleware');
const { updateRoleByName, getRoleByName } = require('~/models/Role');
@@ -15,81 +13,6 @@ const { updateRoleByName, getRoleByName } = require('~/models/Role');
const router = express.Router();
router.use(requireJwtAuth);
/**
* Permission configuration mapping
* Maps route paths to their corresponding schemas and permission types
*/
const permissionConfigs = {
prompts: {
schema: promptPermissionsSchema,
permissionType: PermissionTypes.PROMPTS,
errorMessage: 'Invalid prompt permissions.',
},
agents: {
schema: agentPermissionsSchema,
permissionType: PermissionTypes.AGENTS,
errorMessage: 'Invalid agent permissions.',
},
memories: {
schema: memoryPermissionsSchema,
permissionType: PermissionTypes.MEMORIES,
errorMessage: 'Invalid memory permissions.',
},
'people-picker': {
schema: peoplePickerPermissionsSchema,
permissionType: PermissionTypes.PEOPLE_PICKER,
errorMessage: 'Invalid people picker permissions.',
},
marketplace: {
schema: marketplacePermissionsSchema,
permissionType: PermissionTypes.MARKETPLACE,
errorMessage: 'Invalid marketplace permissions.',
},
};
/**
* Generic handler for updating permissions
* @param {string} permissionKey - The key from permissionConfigs
* @returns {Function} Express route handler
*/
const createPermissionUpdateHandler = (permissionKey) => {
const config = permissionConfigs[permissionKey];
return async (req, res) => {
const { roleName: _r } = req.params;
// TODO: TEMP, use a better parsing for roleName
// TODO: TEMP, use better parsing for roleName
const roleName = _r.toUpperCase();
const updates = req.body;
try {
const parsedUpdates = config.schema.partial().parse(updates);
const role = await getRoleByName(roleName);
if (!role) {
return res.status(404).send({ message: 'Role not found' });
}
const currentPermissions =
role.permissions?.[config.permissionType] || role[config.permissionType] || {};
const mergedUpdates = {
permissions: {
...role.permissions,
[config.permissionType]: {
...currentPermissions,
...parsedUpdates,
},
},
};
const updatedRole = await updateRoleByName(roleName, mergedUpdates);
res.status(200).send(updatedRole);
} catch (error) {
return res.status(400).send({ message: config.errorMessage, error: error.errors });
}
};
};
/**
* GET /api/roles/:roleName
* Get a specific role by name
@@ -122,30 +45,117 @@ router.get('/:roleName', async (req, res) => {
* PUT /api/roles/:roleName/prompts
* Update prompt permissions for a specific role
*/
router.put('/:roleName/prompts', checkAdmin, createPermissionUpdateHandler('prompts'));
router.put('/:roleName/prompts', checkAdmin, async (req, res) => {
const { roleName: _r } = req.params;
// TODO: TEMP, use a better parsing for roleName
const roleName = _r.toUpperCase();
/** @type {TRole['permissions']['PROMPTS']} */
const updates = req.body;
try {
const parsedUpdates = promptPermissionsSchema.partial().parse(updates);
const role = await getRoleByName(roleName);
if (!role) {
return res.status(404).send({ message: 'Role not found' });
}
const currentPermissions =
role.permissions?.[PermissionTypes.PROMPTS] || role[PermissionTypes.PROMPTS] || {};
const mergedUpdates = {
permissions: {
...role.permissions,
[PermissionTypes.PROMPTS]: {
...currentPermissions,
...parsedUpdates,
},
},
};
const updatedRole = await updateRoleByName(roleName, mergedUpdates);
res.status(200).send(updatedRole);
} catch (error) {
return res.status(400).send({ message: 'Invalid prompt permissions.', error: error.errors });
}
});
/**
* PUT /api/roles/:roleName/agents
* Update agent permissions for a specific role
*/
router.put('/:roleName/agents', checkAdmin, createPermissionUpdateHandler('agents'));
router.put('/:roleName/agents', checkAdmin, async (req, res) => {
const { roleName: _r } = req.params;
// TODO: TEMP, use a better parsing for roleName
const roleName = _r.toUpperCase();
/** @type {TRole['permissions']['AGENTS']} */
const updates = req.body;
try {
const parsedUpdates = agentPermissionsSchema.partial().parse(updates);
const role = await getRoleByName(roleName);
if (!role) {
return res.status(404).send({ message: 'Role not found' });
}
const currentPermissions =
role.permissions?.[PermissionTypes.AGENTS] || role[PermissionTypes.AGENTS] || {};
const mergedUpdates = {
permissions: {
...role.permissions,
[PermissionTypes.AGENTS]: {
...currentPermissions,
...parsedUpdates,
},
},
};
const updatedRole = await updateRoleByName(roleName, mergedUpdates);
res.status(200).send(updatedRole);
} catch (error) {
return res.status(400).send({ message: 'Invalid agent permissions.', error: error.errors });
}
});
/**
* PUT /api/roles/:roleName/memories
* Update memory permissions for a specific role
*/
router.put('/:roleName/memories', checkAdmin, createPermissionUpdateHandler('memories'));
router.put('/:roleName/memories', checkAdmin, async (req, res) => {
const { roleName: _r } = req.params;
// TODO: TEMP, use a better parsing for roleName
const roleName = _r.toUpperCase();
/** @type {TRole['permissions']['MEMORIES']} */
const updates = req.body;
/**
* PUT /api/roles/:roleName/people-picker
* Update people picker permissions for a specific role
*/
router.put('/:roleName/people-picker', checkAdmin, createPermissionUpdateHandler('people-picker'));
try {
const parsedUpdates = memoryPermissionsSchema.partial().parse(updates);
/**
* PUT /api/roles/:roleName/marketplace
* Update marketplace permissions for a specific role
*/
router.put('/:roleName/marketplace', checkAdmin, createPermissionUpdateHandler('marketplace'));
const role = await getRoleByName(roleName);
if (!role) {
return res.status(404).send({ message: 'Role not found' });
}
const currentPermissions =
role.permissions?.[PermissionTypes.MEMORIES] || role[PermissionTypes.MEMORIES] || {};
const mergedUpdates = {
permissions: {
...role.permissions,
[PermissionTypes.MEMORIES]: {
...currentPermissions,
...parsedUpdates,
},
},
};
const updatedRole = await updateRoleByName(roleName, mergedUpdates);
res.status(200).send(updatedRole);
} catch (error) {
return res.status(400).send({ message: 'Invalid memory permissions.', error: error.errors });
}
});
module.exports = router;
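With the permissionConfigs table above, every PUT route shares one handler. A brief client-side usage sketch against a running server, assuming the roles router is mounted at /api/roles, the caller holds an admin token, Node 18+ provides global fetch, and the permission keys shown are illustrative:

// Sketch: update agent permissions for the USER role via the generic handler.
async function setAgentPermissions(baseUrl, adminToken) {
  const response = await fetch(`${baseUrl}/api/roles/user/agents`, {
    method: 'PUT',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${adminToken}`,
    },
    body: JSON.stringify({ USE: true, CREATE: false, SHARED_GLOBAL: false }),
  });
  if (!response.ok) {
    throw new Error(`Failed to update role permissions: ${response.status}`);
  }
  return response.json();
}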

View File

@@ -1,11 +1,9 @@
jest.mock('~/models', () => ({
initializeRoles: jest.fn(),
seedDefaultRoles: jest.fn(),
ensureDefaultCategories: jest.fn(),
}));
jest.mock('~/models/Role', () => ({
updateAccessPermissions: jest.fn(),
getRoleByName: jest.fn().mockResolvedValue(null),
getRoleByName: jest.fn(),
updateRoleByName: jest.fn(),
}));
@@ -89,76 +87,4 @@ describe('AppService interface configuration', () => {
expect(app.locals.interfaceConfig.bookmarks).toBe(false);
expect(loadDefaultInterface).toHaveBeenCalled();
});
it('should correctly configure peoplePicker permissions including roles', async () => {
mockLoadCustomConfig.mockResolvedValue({
interface: {
peoplePicker: {
users: true,
groups: true,
roles: true,
},
},
});
loadDefaultInterface.mockResolvedValue({
peoplePicker: {
users: true,
groups: true,
roles: true,
},
});
await AppService(app);
expect(app.locals.interfaceConfig.peoplePicker).toBeDefined();
expect(app.locals.interfaceConfig.peoplePicker).toMatchObject({
users: true,
groups: true,
roles: true,
});
expect(loadDefaultInterface).toHaveBeenCalled();
});
it('should handle mixed peoplePicker permissions', async () => {
mockLoadCustomConfig.mockResolvedValue({
interface: {
peoplePicker: {
users: true,
groups: false,
roles: true,
},
},
});
loadDefaultInterface.mockResolvedValue({
peoplePicker: {
users: true,
groups: false,
roles: true,
},
});
await AppService(app);
expect(app.locals.interfaceConfig.peoplePicker.users).toBe(true);
expect(app.locals.interfaceConfig.peoplePicker.groups).toBe(false);
expect(app.locals.interfaceConfig.peoplePicker.roles).toBe(true);
});
it('should set default peoplePicker permissions when not provided', async () => {
mockLoadCustomConfig.mockResolvedValue({});
loadDefaultInterface.mockResolvedValue({
peoplePicker: {
users: true,
groups: true,
roles: true,
},
});
await AppService(app);
expect(app.locals.interfaceConfig.peoplePicker).toBeDefined();
expect(app.locals.interfaceConfig.peoplePicker.users).toBe(true);
expect(app.locals.interfaceConfig.peoplePicker.groups).toBe(true);
expect(app.locals.interfaceConfig.peoplePicker.roles).toBe(true);
});
});

View File

@@ -1,8 +1,9 @@
const { loadMemoryConfig, agentsConfigSetup, loadWebSearchConfig } = require('@librechat/api');
const { agentsConfigSetup, loadWebSearchConfig } = require('@librechat/api');
const {
FileSources,
loadOCRConfig,
EModelEndpoint,
loadMemoryConfig,
getConfigDefaults,
} = require('librechat-data-provider');
const {
@@ -15,7 +16,6 @@ const {
const { azureAssistantsDefaults, assistantsConfigSetup } = require('./start/assistants');
const { initializeAzureBlobService } = require('./Files/Azure/initialize');
const { initializeFirebase } = require('./Files/Firebase/initialize');
const { seedDefaultRoles, initializeRoles, ensureDefaultCategories } = require('~/models');
const loadCustomConfig = require('./Config/loadCustomConfig');
const handleRateLimits = require('./Config/handleRateLimits');
const { loadDefaultInterface } = require('./start/interface');
@@ -25,6 +25,7 @@ const { processModelSpecs } = require('./start/modelSpecs');
const { initializeS3 } = require('./Files/S3/initialize');
const { loadAndFormatTools } = require('./ToolService');
const { isEnabled } = require('~/server/utils');
const { initializeRoles } = require('~/models');
const { setCachedTools } = require('./Config');
const paths = require('~/config/paths');
@@ -35,8 +36,6 @@ const paths = require('~/config/paths');
*/
const AppService = async (app) => {
await initializeRoles();
await seedDefaultRoles();
await ensureDefaultCategories();
/** @type {TCustomConfig} */
const config = (await loadCustomConfig()) ?? {};
const configDefaults = getConfigDefaults();
@@ -86,7 +85,6 @@ const AppService = async (app) => {
const turnstileConfig = loadTurnstileConfig(config, configDefaults);
const defaultLocals = {
config,
ocr,
paths,
memory,

View File

@@ -28,12 +28,9 @@ jest.mock('./Files/Firebase/initialize', () => ({
}));
jest.mock('~/models', () => ({
initializeRoles: jest.fn(),
seedDefaultRoles: jest.fn(),
ensureDefaultCategories: jest.fn(),
}));
jest.mock('~/models/Role', () => ({
updateAccessPermissions: jest.fn(),
getRoleByName: jest.fn().mockResolvedValue(null),
}));
jest.mock('./Config', () => ({
setCachedTools: jest.fn(),
@@ -134,9 +131,6 @@ describe('AppService', () => {
expect(process.env.CDN_PROVIDER).toEqual('testStrategy');
expect(app.locals).toEqual({
config: expect.objectContaining({
fileStrategy: 'testStrategy',
}),
socialLogins: ['testLogin'],
fileStrategy: 'testStrategy',
interfaceConfig: expect.objectContaining({
@@ -171,9 +165,6 @@ describe('AppService', () => {
agents: {
disableBuilder: false,
capabilities: expect.arrayContaining([...defaultAgentCapabilities]),
maxCitations: 30,
maxCitationsPerFile: 7,
minRelevanceScore: 0.45,
},
});
});
@@ -779,7 +770,6 @@ describe('AppService updating app.locals and issuing warnings', () => {
expect(app.locals).toBeDefined();
expect(app.locals.paths).toBeDefined();
expect(app.locals.config).toEqual({});
expect(app.locals.fileStrategy).toEqual(FileSources.local);
expect(app.locals.socialLogins).toEqual(defaultSocialLogins);
expect(app.locals.balance).toEqual(
@@ -812,7 +802,6 @@ describe('AppService updating app.locals and issuing warnings', () => {
expect(app.locals).toBeDefined();
expect(app.locals.paths).toBeDefined();
expect(app.locals.config).toEqual(customConfig);
expect(app.locals.fileStrategy).toEqual(customConfig.fileStrategy);
expect(app.locals.socialLogins).toEqual(customConfig.registration.socialLogins);
expect(app.locals.balance).toEqual(customConfig.balance);
@@ -970,48 +959,4 @@ describe('AppService updating app.locals and issuing warnings', () => {
expect(app.locals.ocr.strategy).toEqual('mistral_ocr');
expect(app.locals.ocr.mistralModel).toEqual('mistral-medium');
});
it('should correctly configure peoplePicker permissions when specified', async () => {
const mockConfig = {
interface: {
peoplePicker: {
users: true,
groups: true,
roles: true,
},
},
};
require('./Config/loadCustomConfig').mockImplementationOnce(() => Promise.resolve(mockConfig));
const app = { locals: {} };
await AppService(app);
// Check that interface config includes the permissions
expect(app.locals.interfaceConfig.peoplePicker).toBeDefined();
expect(app.locals.interfaceConfig.peoplePicker).toMatchObject({
users: true,
groups: true,
roles: true,
});
});
it('should use default peoplePicker permissions when not specified', async () => {
const mockConfig = {
interface: {
// No peoplePicker configuration
},
};
require('./Config/loadCustomConfig').mockImplementationOnce(() => Promise.resolve(mockConfig));
const app = { locals: {} };
await AppService(app);
// Check that default permissions are applied
expect(app.locals.interfaceConfig.peoplePicker).toBeDefined();
expect(app.locals.interfaceConfig.peoplePicker.users).toBe(true);
expect(app.locals.interfaceConfig.peoplePicker.groups).toBe(true);
expect(app.locals.interfaceConfig.peoplePicker.roles).toBe(true);
});
});

View File

@@ -60,14 +60,7 @@ const replaceArtifactContent = (originalText, artifact, original, updated) => {
// Find boundaries between ARTIFACT_START and ARTIFACT_END
const contentStart = artifactContent.indexOf('\n', artifactContent.indexOf(ARTIFACT_START)) + 1;
let contentEnd = artifactContent.lastIndexOf(ARTIFACT_END);
// Special case: if contentEnd is 0, it means the only ::: found is at the start of :::artifact
// This indicates an incomplete artifact (no closing :::)
// We need to check that it's exactly at position 0 (the beginning of artifactContent)
if (contentEnd === 0 && artifactContent.indexOf(ARTIFACT_START) === 0) {
contentEnd = artifactContent.length;
}
const contentEnd = artifactContent.lastIndexOf(ARTIFACT_END);
if (contentStart === -1 || contentEnd === -1) {
return null;
@@ -79,20 +72,12 @@ const replaceArtifactContent = (originalText, artifact, original, updated) => {
// Determine where to look for the original content
let searchStart, searchEnd;
if (codeBlockStart !== -1) {
// Code block starts
if (codeBlockStart !== -1 && codeBlockEnd !== -1) {
// If code blocks exist, search between them
searchStart = codeBlockStart + 4; // after ```\n
if (codeBlockEnd !== -1 && codeBlockEnd > codeBlockStart) {
// Code block has proper ending
searchEnd = codeBlockEnd;
} else {
// No closing backticks found or they're before the opening (shouldn't happen)
// This might be an incomplete artifact - search to contentEnd
searchEnd = contentEnd;
}
searchEnd = codeBlockEnd;
} else {
// No code blocks at all
// Otherwise search in the whole artifact content
searchStart = contentStart;
searchEnd = contentEnd;
}
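// Editor's note: a minimal hedged sketch of the boundary case the removed branch covered.
// Assumption (taken from the surrounding code and tests): ARTIFACT_START begins with
// ':::artifact' and ARTIFACT_END is ':::', so an artifact cut off mid-stream has no closing marker.
const ARTIFACT_END_EXAMPLE = ':::';
const incompleteArtifactExample = ':::artifact{id="demo"}\n```\nconsole.log("hi")';
// lastIndexOf() only finds the ':::' that opens ':::artifact', i.e. index 0,
// which is why contentEnd === 0 was treated as "no real closing marker".
console.log(incompleteArtifactExample.lastIndexOf(ARTIFACT_END_EXAMPLE)); // 0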

View File

@@ -89,9 +89,9 @@ describe('replaceArtifactContent', () => {
};
test('should replace content within artifact boundaries', () => {
const original = "console.log('hello')";
const original = 'console.log(\'hello\')';
const artifact = createTestArtifact(original);
const updated = "console.log('updated')";
const updated = 'console.log(\'updated\')';
const result = replaceArtifactContent(artifact.text, artifact, original, updated);
expect(result).toContain(updated);
@@ -317,182 +317,4 @@ console.log(greeting);`;
expect(result).not.toContain('\n\n```');
expect(result).not.toContain('```\n\n');
});
describe('incomplete artifacts', () => {
test('should handle incomplete artifacts (missing closing ::: and ```)', () => {
const original = `<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>Pomodoro</title>
<meta name="description" content="A single-file Pomodoro timer with logs, charts, sounds, and dark mode." />
<style>
:root{`;
const prefix = `Awesome idea! I'll deliver a complete single-file HTML app called "Pomodoro" with:
- Custom session/break durations
You can save this as pomodoro.html and open it directly in your browser.
`;
// This simulates the real incomplete artifact case - no closing ``` or :::
const incompleteArtifact = `${ARTIFACT_START}{identifier="pomodoro-single-file-app" type="text/html" title="Pomodoro — Single File App"}
\`\`\`
${original}`;
const fullText = prefix + incompleteArtifact;
const message = { text: fullText };
const artifacts = findAllArtifacts(message);
expect(artifacts).toHaveLength(1);
expect(artifacts[0].end).toBe(fullText.length);
const updated = original.replace('Pomodoro</title>', 'Pomodoro</title>UPDATED');
const result = replaceArtifactContent(fullText, artifacts[0], original, updated);
expect(result).not.toBeNull();
expect(result).toContain('UPDATED');
expect(result).toContain(prefix);
// Should not have added closing markers
expect(result).not.toMatch(/:::\s*$/);
});
test('should handle incomplete artifacts with only opening code block', () => {
const original = 'function hello() { console.log("world"); }';
const incompleteArtifact = `${ARTIFACT_START}{id="test"}\n\`\`\`\n${original}`;
const message = { text: incompleteArtifact };
const artifacts = findAllArtifacts(message);
expect(artifacts).toHaveLength(1);
const updated = 'function hello() { console.log("UPDATED"); }';
const result = replaceArtifactContent(incompleteArtifact, artifacts[0], original, updated);
expect(result).not.toBeNull();
expect(result).toContain('UPDATED');
});
test('should handle incomplete artifacts without code blocks', () => {
const original = 'Some plain text content';
const incompleteArtifact = `${ARTIFACT_START}{id="test"}\n${original}`;
const message = { text: incompleteArtifact };
const artifacts = findAllArtifacts(message);
expect(artifacts).toHaveLength(1);
const updated = 'Some UPDATED text content';
const result = replaceArtifactContent(incompleteArtifact, artifacts[0], original, updated);
expect(result).not.toBeNull();
expect(result).toContain('UPDATED');
});
});
describe('regression tests for edge cases', () => {
test('should still handle complete artifacts correctly', () => {
// Ensure we didn't break normal artifact handling
const original = 'console.log("test");';
const artifact = createArtifactText({ content: original });
const message = { text: artifact };
const artifacts = findAllArtifacts(message);
expect(artifacts).toHaveLength(1);
const updated = 'console.log("updated");';
const result = replaceArtifactContent(artifact, artifacts[0], original, updated);
expect(result).not.toBeNull();
expect(result).toContain(updated);
expect(result).toContain(ARTIFACT_END);
expect(result).toMatch(/```\nconsole\.log\("updated"\);\n```/);
});
test('should handle multiple complete artifacts', () => {
// Ensure multiple artifacts still work
const content1 = 'First artifact';
const content2 = 'Second artifact';
const text = `${createArtifactText({ content: content1 })}\n\n${createArtifactText({ content: content2 })}`;
const message = { text };
const artifacts = findAllArtifacts(message);
expect(artifacts).toHaveLength(2);
// Update first artifact
const result1 = replaceArtifactContent(text, artifacts[0], content1, 'First UPDATED');
expect(result1).not.toBeNull();
expect(result1).toContain('First UPDATED');
expect(result1).toContain(content2);
// Update second artifact
const result2 = replaceArtifactContent(text, artifacts[1], content2, 'Second UPDATED');
expect(result2).not.toBeNull();
expect(result2).toContain(content1);
expect(result2).toContain('Second UPDATED');
});
test('should not mistake ::: at position 0 for artifact end in complete artifacts', () => {
// This tests the specific fix - ensuring contentEnd=0 doesn't break complete artifacts
const original = 'test content';
// Create an artifact that will have ::: at position 0 when substring'd
const artifact = `${ARTIFACT_START}\n\`\`\`\n${original}\n\`\`\`\n${ARTIFACT_END}`;
const message = { text: artifact };
const artifacts = findAllArtifacts(message);
expect(artifacts).toHaveLength(1);
const updated = 'updated content';
const result = replaceArtifactContent(artifact, artifacts[0], original, updated);
expect(result).not.toBeNull();
expect(result).toContain(updated);
expect(result).toContain(ARTIFACT_END);
});
test('should handle empty artifacts', () => {
// Edge case: empty artifact
const artifact = `${ARTIFACT_START}\n${ARTIFACT_END}`;
const message = { text: artifact };
const artifacts = findAllArtifacts(message);
expect(artifacts).toHaveLength(1);
// Trying to replace non-existent content should return null
const result = replaceArtifactContent(artifact, artifacts[0], 'something', 'updated');
expect(result).toBeNull();
});
test('should preserve whitespace and formatting in complete artifacts', () => {
const original = ` function test() {
return {
value: 42
};
}`;
const artifact = createArtifactText({ content: original });
const message = { text: artifact };
const artifacts = findAllArtifacts(message);
const updated = ` function test() {
return {
value: 100
};
}`;
const result = replaceArtifactContent(artifact, artifacts[0], original, updated);
expect(result).not.toBeNull();
expect(result).toContain('value: 100');
// Should preserve exact formatting
expect(result).toMatch(
/```\n {2}function test\(\) \{\n {4}return \{\n {6}value: 100\n {4}\};\n {2}\}\n```/,
);
});
});
});

View File

@@ -1,11 +1,12 @@
const { logger } = require('@librechat/data-schemas');
const { EModelEndpoint } = require('librechat-data-provider');
const { useAzurePlugins } = require('~/server/services/Config/EndpointService').config;
const {
getAnthropicModels,
getBedrockModels,
getOpenAIModels,
getGoogleModels,
getBedrockModels,
getAnthropicModels,
} = require('~/server/services/ModelService');
const { logger } = require('~/config');
/**
* Loads the default models for the application.
@@ -15,42 +16,58 @@ const {
*/
async function loadDefaultModels(req) {
try {
const [openAI, anthropic, azureOpenAI, assistants, azureAssistants, google, bedrock] =
await Promise.all([
getOpenAIModels({ user: req.user.id }).catch((error) => {
logger.error('Error fetching OpenAI models:', error);
const [
openAI,
anthropic,
azureOpenAI,
gptPlugins,
assistants,
azureAssistants,
google,
bedrock,
] = await Promise.all([
getOpenAIModels({ user: req.user.id }).catch((error) => {
logger.error('Error fetching OpenAI models:', error);
return [];
}),
getAnthropicModels({ user: req.user.id }).catch((error) => {
logger.error('Error fetching Anthropic models:', error);
return [];
}),
getOpenAIModels({ user: req.user.id, azure: true }).catch((error) => {
logger.error('Error fetching Azure OpenAI models:', error);
return [];
}),
getOpenAIModels({ user: req.user.id, azure: useAzurePlugins, plugins: true }).catch(
(error) => {
logger.error('Error fetching Plugin models:', error);
return [];
}),
getAnthropicModels({ user: req.user.id }).catch((error) => {
logger.error('Error fetching Anthropic models:', error);
return [];
}),
getOpenAIModels({ user: req.user.id, azure: true }).catch((error) => {
logger.error('Error fetching Azure OpenAI models:', error);
return [];
}),
getOpenAIModels({ assistants: true }).catch((error) => {
logger.error('Error fetching OpenAI Assistants API models:', error);
return [];
}),
getOpenAIModels({ azureAssistants: true }).catch((error) => {
logger.error('Error fetching Azure OpenAI Assistants API models:', error);
return [];
}),
Promise.resolve(getGoogleModels()).catch((error) => {
logger.error('Error getting Google models:', error);
return [];
}),
Promise.resolve(getBedrockModels()).catch((error) => {
logger.error('Error getting Bedrock models:', error);
return [];
}),
]);
},
),
getOpenAIModels({ assistants: true }).catch((error) => {
logger.error('Error fetching OpenAI Assistants API models:', error);
return [];
}),
getOpenAIModels({ azureAssistants: true }).catch((error) => {
logger.error('Error fetching Azure OpenAI Assistants API models:', error);
return [];
}),
Promise.resolve(getGoogleModels()).catch((error) => {
logger.error('Error getting Google models:', error);
return [];
}),
Promise.resolve(getBedrockModels()).catch((error) => {
logger.error('Error getting Bedrock models:', error);
return [];
}),
]);
return {
[EModelEndpoint.openAI]: openAI,
[EModelEndpoint.agents]: openAI,
[EModelEndpoint.google]: google,
[EModelEndpoint.anthropic]: anthropic,
[EModelEndpoint.gptPlugins]: gptPlugins,
[EModelEndpoint.azureOpenAI]: azureOpenAI,
[EModelEndpoint.assistants]: assistants,
[EModelEndpoint.azureAssistants]: azureAssistants,

View File

@@ -1,6 +1,6 @@
const { logger } = require('@librechat/data-schemas');
const { isAgentsEndpoint, removeNullishValues, Constants } = require('librechat-data-provider');
const { loadAgent } = require('~/models/Agent');
const { logger } = require('~/config');
const buildOptions = (req, endpoint, parsedBody, endpointType) => {
const { spec, iconURL, agent_id, instructions, ...model_parameters } = parsedBody;

View File

@@ -1,5 +1,4 @@
const { logger } = require('@librechat/data-schemas');
const { validateAgentModel } = require('@librechat/api');
const { createContentAggregator } = require('@librechat/agents');
const {
Constants,
@@ -12,12 +11,10 @@ const {
getDefaultHandlers,
} = require('~/server/controllers/agents/callbacks');
const { initializeAgent } = require('~/server/services/Endpoints/agents/agent');
const { getModelsConfig } = require('~/server/controllers/ModelController');
const { getCustomEndpointConfig } = require('~/server/services/Config');
const { loadAgentTools } = require('~/server/services/ToolService');
const AgentClient = require('~/server/controllers/agents/client');
const { getAgent } = require('~/models/Agent');
const { logViolation } = require('~/cache');
function createToolLoader() {
/**
@@ -75,19 +72,6 @@ const initializeClient = async ({ req, res, endpointOption }) => {
throw new Error('Agent not found');
}
const modelsConfig = await getModelsConfig(req);
const validationResult = await validateAgentModel({
req,
res,
modelsConfig,
logViolation,
agent: primaryAgent,
});
if (!validationResult.isValid) {
throw new Error(validationResult.error?.message);
}
const agentConfigs = new Map();
/** @type {Set<string>} */
const allowedProviders = new Set(req?.app?.locals?.[EModelEndpoint.agents]?.allowedProviders);
@@ -117,19 +101,6 @@ const initializeClient = async ({ req, res, endpointOption }) => {
if (!agent) {
throw new Error(`Agent ${agentId} not found`);
}
const validationResult = await validateAgentModel({
req,
res,
agent,
modelsConfig,
logViolation,
});
if (!validationResult.isValid) {
throw new Error(validationResult.error?.message);
}
const config = await initializeAgent({
req,
res,

View File

@@ -1,149 +0,0 @@
const { nanoid } = require('nanoid');
const { checkAccess } = require('@librechat/api');
const { Tools, PermissionTypes, Permissions } = require('librechat-data-provider');
const { getCustomConfig } = require('~/server/services/Config/getCustomConfig');
const { getRoleByName } = require('~/models/Role');
const { logger } = require('~/config');
const { Files } = require('~/models');
/**
* Process file search results from tool calls
* @param {Object} options
* @param {IUser} options.user - The user object
* @param {GraphRunnableConfig['configurable']} options.metadata - The metadata
* @param {any} options.toolArtifact - The tool artifact containing structured data
* @param {string} options.toolCallId - The tool call ID
* @returns {Promise<Object|null>} The file search attachment or null
*/
async function processFileCitations({ user, toolArtifact, toolCallId, metadata }) {
try {
if (!toolArtifact?.[Tools.file_search]?.sources) {
return null;
}
if (user) {
try {
const hasFileCitationsAccess = await checkAccess({
user,
permissionType: PermissionTypes.FILE_CITATIONS,
permissions: [Permissions.USE],
getRoleByName,
});
if (!hasFileCitationsAccess) {
logger.debug(
`[processFileCitations] User ${user.id} does not have FILE_CITATIONS permission`,
);
return null;
}
} catch (error) {
logger.error(
`[processFileCitations] Permission check failed for FILE_CITATIONS: ${error.message}`,
);
logger.debug(`[processFileCitations] Proceeding with citations due to permission error`);
}
}
const customConfig = await getCustomConfig();
const maxCitations = customConfig?.endpoints?.agents?.maxCitations ?? 30;
const maxCitationsPerFile = customConfig?.endpoints?.agents?.maxCitationsPerFile ?? 5;
const minRelevanceScore = customConfig?.endpoints?.agents?.minRelevanceScore ?? 0.45;
const sources = toolArtifact[Tools.file_search].sources || [];
const filteredSources = sources.filter((source) => source.relevance >= minRelevanceScore);
if (filteredSources.length === 0) {
logger.debug(
`[processFileCitations] No sources above relevance threshold of ${minRelevanceScore}`,
);
return null;
}
const selectedSources = applyCitationLimits(filteredSources, maxCitations, maxCitationsPerFile);
const enhancedSources = await enhanceSourcesWithMetadata(selectedSources, customConfig);
if (enhancedSources.length > 0) {
const fileSearchAttachment = {
type: Tools.file_search,
[Tools.file_search]: { sources: enhancedSources },
toolCallId: toolCallId,
messageId: metadata.run_id,
conversationId: metadata.thread_id,
name: `${Tools.file_search}_file_search_results_${nanoid()}`,
};
return fileSearchAttachment;
}
return null;
} catch (error) {
logger.error('[processFileCitations] Error processing file citations:', error);
return null;
}
}
/**
* Apply citation limits to sources
* @param {Array} sources - All sources
* @param {number} maxCitations - Maximum total citations
* @param {number} maxCitationsPerFile - Maximum citations per file
* @returns {Array} Selected sources
*/
function applyCitationLimits(sources, maxCitations, maxCitationsPerFile) {
const byFile = {};
sources.forEach((source) => {
if (!byFile[source.fileId]) {
byFile[source.fileId] = [];
}
byFile[source.fileId].push(source);
});
const representatives = [];
for (const fileId in byFile) {
const fileSources = byFile[fileId].sort((a, b) => b.relevance - a.relevance);
const selectedFromFile = fileSources.slice(0, maxCitationsPerFile);
representatives.push(...selectedFromFile);
}
return representatives.sort((a, b) => b.relevance - a.relevance).slice(0, maxCitations);
}
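// Editor's note: a hedged usage sketch of the limits above; the sample data is invented.
// With maxCitations = 3 and maxCitationsPerFile = 2, each file first keeps its two most
// relevant sources, then the combined list is re-sorted by relevance and cut to three.
const exampleSources = [
  { fileId: 'a', relevance: 0.9 },
  { fileId: 'a', relevance: 0.8 },
  { fileId: 'a', relevance: 0.7 },
  { fileId: 'b', relevance: 0.85 },
  { fileId: 'b', relevance: 0.6 },
];
const picked = applyCitationLimits(exampleSources, 3, 2);
// picked -> [{ fileId: 'a', relevance: 0.9 }, { fileId: 'b', relevance: 0.85 }, { fileId: 'a', relevance: 0.8 }]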
/**
* Enhance sources with file metadata from database
* @param {Array} sources - Selected sources
* @param {Object} customConfig - Custom configuration
* @returns {Promise<Array>} Enhanced sources
*/
async function enhanceSourcesWithMetadata(sources, customConfig) {
const fileIds = [...new Set(sources.map((source) => source.fileId))];
let fileMetadataMap = {};
try {
const files = await Files.find({ file_id: { $in: fileIds } });
fileMetadataMap = files.reduce((map, file) => {
map[file.file_id] = file;
return map;
}, {});
} catch (error) {
logger.error('[enhanceSourcesWithMetadata] Error looking up file metadata:', error);
}
return sources.map((source) => {
const fileRecord = fileMetadataMap[source.fileId] || {};
const configuredStorageType = fileRecord.source || customConfig?.fileStrategy || 'local';
return {
...source,
fileName: fileRecord.filename || source.fileName || 'Unknown File',
metadata: {
...source.metadata,
storageType: configuredStorageType,
},
};
});
}
module.exports = {
applyCitationLimits,
processFileCitations,
enhanceSourcesWithMetadata,
};

View File

@@ -11,7 +11,6 @@ const {
imageExtRegex,
EToolResources,
} = require('librechat-data-provider');
const { filterFilesByAgentAccess } = require('~/server/services/Files/permissions');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { convertImage } = require('~/server/services/Files/images/convert');
const { createFile, getFiles, updateFile } = require('~/models/File');
@@ -165,24 +164,14 @@ const primeFiles = async (options, apiKey) => {
const file_ids = tool_resources?.[EToolResources.execute_code]?.file_ids ?? [];
const agentResourceIds = new Set(file_ids);
const resourceFiles = tool_resources?.[EToolResources.execute_code]?.files ?? [];
// Get all files first
const allFiles = (await getFiles({ file_id: { $in: file_ids } }, null, { text: 0 })) ?? [];
// Filter by access if user and agent are provided
let dbFiles;
if (req?.user?.id && agentId) {
dbFiles = await filterFilesByAgentAccess({
files: allFiles,
userId: req.user.id,
role: req.user.role,
agentId,
});
} else {
dbFiles = allFiles;
}
dbFiles = dbFiles.concat(resourceFiles);
const dbFiles = (
(await getFiles(
{ file_id: { $in: file_ids } },
null,
{ text: 0 },
{ userId: req?.user?.id, agentId },
)) ?? []
).concat(resourceFiles);
const files = [];
const sessions = new Map();
@@ -236,17 +225,7 @@ const primeFiles = async (options, apiKey) => {
entity_id: queryParams.entity_id,
apiKey,
});
// Preserve existing metadata when adding fileIdentifier
const updatedMetadata = {
...file.metadata, // Preserve existing metadata (like S3 storage info)
fileIdentifier, // Add fileIdentifier
};
await updateFile({
file_id: file.file_id,
metadata: updatedMetadata,
});
await updateFile({ file_id: file.file_id, metadata: { fileIdentifier } });
sessions.set(session_id, true);
pushFile();
} catch (error) {

View File

@@ -1,4 +1,5 @@
const fs = require('fs');
const path = require('path');
const fetch = require('node-fetch');
const { FileSources } = require('librechat-data-provider');
const {
@@ -14,7 +15,7 @@ const { logger } = require('~/config');
const bucketName = process.env.AWS_BUCKET_NAME;
const defaultBasePath = 'images';
let s3UrlExpirySeconds = 2 * 60; // 2 minutes
let s3UrlExpirySeconds = 7 * 24 * 60 * 60;
let s3RefreshExpiryMs = null;
if (process.env.S3_URL_EXPIRY_SECONDS !== undefined) {
@@ -24,7 +25,7 @@ if (process.env.S3_URL_EXPIRY_SECONDS !== undefined) {
s3UrlExpirySeconds = Math.min(parsed, 7 * 24 * 60 * 60);
} else {
logger.warn(
`[S3] Invalid S3_URL_EXPIRY_SECONDS value: "${process.env.S3_URL_EXPIRY_SECONDS}". Using 2-minute expiry.`,
`[S3] Invalid S3_URL_EXPIRY_SECONDS value: "${process.env.S3_URL_EXPIRY_SECONDS}". Using 7-day expiry.`,
);
}
}
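// Editor's note: illustration of the clamping above, assuming the env value parses as whole seconds.
const sevenDays = 7 * 24 * 60 * 60; // 604800, the longest lifetime S3 accepts for a presigned URL
console.log(Math.min(parseInt('300', 10), sevenDays)); // 300 (5 minutes, used as-is)
console.log(Math.min(parseInt('1209600', 10), sevenDays)); // 604800 (requested 14 days, capped at 7)
// Non-numeric or missing values fall through to the default and trigger the warning logged above.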
@@ -79,29 +80,12 @@ async function saveBufferToS3({ userId, buffer, fileName, basePath = defaultBase
* @param {string} params.userId - The user's unique identifier.
* @param {string} params.fileName - The file name in S3.
* @param {string} [params.basePath='images'] - The base path in the bucket.
* @param {string} [params.customFilename] - Custom filename for Content-Disposition header (overrides extracted filename).
* @param {string} [params.contentType] - Custom content type for the response.
* @returns {Promise<string>} A URL to access the S3 object
*/
async function getS3URL({
userId,
fileName,
basePath = defaultBasePath,
customFilename = null,
contentType = null,
}) {
async function getS3URL({ userId, fileName, basePath = defaultBasePath }) {
const key = getS3Key(basePath, userId, fileName);
const params = { Bucket: bucketName, Key: key };
// Add response headers if specified
if (customFilename) {
params.ResponseContentDisposition = `attachment; filename="${customFilename}"`;
}
if (contentType) {
params.ResponseContentType = contentType;
}
try {
const s3 = initializeS3();
return await getSignedUrl(s3, new GetObjectCommand(params), { expiresIn: s3UrlExpirySeconds });
@@ -204,7 +188,7 @@ async function uploadFileToS3({ req, file, file_id, basePath = defaultBasePath }
try {
const inputFilePath = file.path;
const userId = req.user.id;
const fileName = `${file_id}__${file.originalname}`;
const fileName = `${file_id}__${path.basename(inputFilePath)}`;
const key = getS3Key(basePath, userId, fileName);
const stats = await fs.promises.stat(inputFilePath);

View File

@@ -60,14 +60,13 @@ const deleteVectors = async (req, file) => {
* have a `path` property that points to the location of the uploaded file.
* @param {string} params.file_id - The file ID.
* @param {string} [params.entity_id] - The entity ID for shared resources.
* @param {Object} [params.storageMetadata] - Storage metadata for dual storage pattern.
*
* @returns {Promise<{ filepath: string, bytes: number }>}
* A promise that resolves to an object containing:
* - filepath: The path where the file is saved.
* - bytes: The size of the file in bytes.
*/
async function uploadVectors({ req, file, file_id, entity_id, storageMetadata }) {
async function uploadVectors({ req, file, file_id, entity_id }) {
if (!process.env.RAG_API_URL) {
throw new Error('RAG_API_URL not defined');
}
@@ -81,11 +80,6 @@ async function uploadVectors({ req, file, file_id, entity_id, storageMetadata })
formData.append('entity_id', entity_id);
}
// Include storage metadata for RAG API to store with embeddings
if (storageMetadata) {
formData.append('storage_metadata', JSON.stringify(storageMetadata));
}
const formHeaders = formData.getHeaders();
const response = await axios.post(`${process.env.RAG_API_URL}/embed`, formData, {

View File

@@ -1,12 +0,0 @@
const { processCodeFile } = require('./Code/process');
const { processFileUpload } = require('./process');
const { uploadImageBuffer } = require('./images');
const { hasAccessToFilesViaAgent, filterFilesByAgentAccess } = require('./permissions');
module.exports = {
processCodeFile,
processFileUpload,
uploadImageBuffer,
hasAccessToFilesViaAgent,
filterFilesByAgentAccess,
};

View File

@@ -1,129 +0,0 @@
const { logger } = require('@librechat/data-schemas');
const { PermissionBits, ResourceType } = require('librechat-data-provider');
const { checkPermission } = require('~/server/services/PermissionService');
const { getAgent } = require('~/models/Agent');
/**
* Checks if a user has access to multiple files through a shared agent (batch operation)
* @param {Object} params - Parameters object
* @param {string} params.userId - The user ID to check access for
* @param {string} [params.role] - Optional user role to avoid DB query
* @param {string[]} params.fileIds - Array of file IDs to check
* @param {string} params.agentId - The agent ID that might grant access
* @returns {Promise<Map<string, boolean>>} Map of fileId to access status
*/
const hasAccessToFilesViaAgent = async ({ userId, role, fileIds, agentId }) => {
const accessMap = new Map();
// Initialize all files as no access
fileIds.forEach((fileId) => accessMap.set(fileId, false));
try {
const agent = await getAgent({ id: agentId });
if (!agent) {
return accessMap;
}
// Check if user is the author - if so, grant access to all files
if (agent.author.toString() === userId.toString()) {
fileIds.forEach((fileId) => accessMap.set(fileId, true));
return accessMap;
}
// Check if user has at least VIEW permission on the agent
const hasViewPermission = await checkPermission({
userId,
role,
resourceType: ResourceType.AGENT,
resourceId: agent._id,
requiredPermission: PermissionBits.VIEW,
});
if (!hasViewPermission) {
return accessMap;
}
// Check if user has EDIT permission (which would indicate collaborative access)
const hasEditPermission = await checkPermission({
userId,
role,
resourceType: ResourceType.AGENT,
resourceId: agent._id,
requiredPermission: PermissionBits.EDIT,
});
// If user only has VIEW permission, they can't access files
// Only users with EDIT permission or higher can access agent files
if (!hasEditPermission) {
return accessMap;
}
// User has edit permissions - check which files are actually attached
const attachedFileIds = new Set();
if (agent.tool_resources) {
for (const [_resourceType, resource] of Object.entries(agent.tool_resources)) {
if (resource?.file_ids && Array.isArray(resource.file_ids)) {
resource.file_ids.forEach((fileId) => attachedFileIds.add(fileId));
}
}
}
// Grant access only to files that are attached to this agent
fileIds.forEach((fileId) => {
if (attachedFileIds.has(fileId)) {
accessMap.set(fileId, true);
}
});
return accessMap;
} catch (error) {
logger.error('[hasAccessToFilesViaAgent] Error checking file access:', error);
return accessMap;
}
};
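// Editor's note: hedged usage sketch of the batch check above; the ids are made up.
async function exampleAgentFileCheck() {
  const accessMap = await hasAccessToFilesViaAgent({
    userId: 'user_123',
    role: 'USER',
    fileIds: ['file_1', 'file_2'],
    agentId: 'agent_abc',
  });
  // accessMap.get('file_1') is true only when the agent exists, the user is its author or
  // holds EDIT permission on it, and 'file_1' appears in one of the agent's tool_resources.
  return accessMap;
}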
/**
* Filter files based on user access through agents
* @param {Object} params - Parameters object
* @param {Array<MongoFile>} params.files - Array of file documents
* @param {string} params.userId - User ID for access control
* @param {string} [params.role] - Optional user role to avoid DB query
* @param {string} params.agentId - Agent ID that might grant access to files
* @returns {Promise<Array<MongoFile>>} Filtered array of accessible files
*/
const filterFilesByAgentAccess = async ({ files, userId, role, agentId }) => {
if (!userId || !agentId || !files || files.length === 0) {
return files;
}
// Separate owned files from files that need access check
const filesToCheck = [];
const ownedFiles = [];
for (const file of files) {
if (file.user && file.user.toString() === userId.toString()) {
ownedFiles.push(file);
} else {
filesToCheck.push(file);
}
}
if (filesToCheck.length === 0) {
return ownedFiles;
}
// Batch check access for all non-owned files
const fileIds = filesToCheck.map((f) => f.file_id);
const accessMap = await hasAccessToFilesViaAgent({ userId, role, fileIds, agentId });
// Filter files based on access
const accessibleFiles = filesToCheck.filter((file) => accessMap.get(file.file_id));
return [...ownedFiles, ...accessibleFiles];
};
module.exports = {
hasAccessToFilesViaAgent,
filterFilesByAgentAccess,
};

View File

@@ -11,12 +11,13 @@ const {
EModelEndpoint,
EToolResources,
mergeFileConfig,
hostImageIdSuffix,
AgentCapabilities,
checkOpenAIStorage,
removeNullishValues,
hostImageNamePrefix,
isAssistantsEndpoint,
} = require('librechat-data-provider');
const { sanitizeFilename } = require('@librechat/api');
const { EnvVar } = require('@librechat/agents');
const {
convertImage,
@@ -31,33 +32,9 @@ const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { checkCapability } = require('~/server/services/Config');
const { LB_QueueAsyncCall } = require('~/server/utils/queue');
const { getStrategyFunctions } = require('./strategies');
const { getFileStrategy } = require('~/server/utils/getFileStrategy');
const { determineFileType } = require('~/server/utils');
const { logger } = require('~/config');
/**
* Creates a modular file upload wrapper that ensures filename sanitization
* across all storage strategies. This prevents storage-specific implementations
* from having to handle sanitization individually.
*
* @param {Function} uploadFunction - The storage strategy's upload function
* @returns {Function} - Wrapped upload function with sanitization
*/
const createSanitizedUploadWrapper = (uploadFunction) => {
return async (params) => {
const { req, file, file_id, ...restParams } = params;
// Create a modified file object with sanitized original name
// This ensures consistent filename handling across all storage strategies
const sanitizedFile = {
...file,
originalname: sanitizeFilename(file.originalname),
};
return uploadFunction({ req, file: sanitizedFile, file_id, ...restParams });
};
};
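// Editor's note: hedged sketch of the wrapper's effect; sanitizeFilename comes from
// @librechat/api per the imports above, and the inner upload function is a stand-in.
const exampleUpload = createSanitizedUploadWrapper(async ({ file }) => file.originalname);
// exampleUpload({ req, file: { originalname: 'my report (v2).pdf' }, file_id: 'f1' })
// resolves to whatever sanitizeFilename('my report (v2).pdf') returns, so no storage
// strategy ever sees the raw client-supplied filename.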
/**
*
* @param {Array<MongoFile>} files
@@ -320,7 +297,7 @@ const processFileURL = async ({ fileStrategy, userId, URL, fileName, basePath, c
*/
const processImageFile = async ({ req, res, metadata, returnFile = false }) => {
const { file } = req;
const source = getFileStrategy(req.app.locals, { isImage: true });
const source = req.app.locals.fileStrategy;
const { handleImageUpload } = getStrategyFunctions(source);
const { file_id, temp_file_id, endpoint } = metadata;
@@ -366,7 +343,7 @@ const processImageFile = async ({ req, res, metadata, returnFile = false }) => {
* @returns {Promise<{ filepath: string, filename: string, source: string, type: string}>}
*/
const uploadImageBuffer = async ({ req, context, metadata = {}, resize = true }) => {
const source = getFileStrategy(req.app.locals, { isImage: true });
const source = req.app.locals.fileStrategy;
const { saveBuffer } = getStrategyFunctions(source);
let { buffer, width, height, bytes, filename, file_id, type } = metadata;
if (resize) {
@@ -414,10 +391,9 @@ const processFileUpload = async ({ req, res, metadata }) => {
const isAssistantUpload = isAssistantsEndpoint(metadata.endpoint);
const assistantSource =
metadata.endpoint === EModelEndpoint.azureAssistants ? FileSources.azure : FileSources.openai;
// Use the configured file strategy for regular file uploads (not vectordb)
const source = isAssistantUpload ? assistantSource : req.app.locals.fileStrategy;
const source = isAssistantUpload ? assistantSource : FileSources.vectordb;
const { handleFileUpload } = getStrategyFunctions(source);
const { file_id, temp_file_id = null } = metadata;
const { file_id, temp_file_id } = metadata;
/** @type {OpenAI | undefined} */
let openai;
@@ -426,7 +402,6 @@ const processFileUpload = async ({ req, res, metadata }) => {
}
const { file } = req;
const sanitizedUploadFn = createSanitizedUploadWrapper(handleFileUpload);
const {
id,
bytes,
@@ -435,7 +410,7 @@ const processFileUpload = async ({ req, res, metadata }) => {
embedded,
height,
width,
} = await sanitizedUploadFn({
} = await handleFileUpload({
req,
file,
file_id,
@@ -474,7 +449,7 @@ const processFileUpload = async ({ req, res, metadata }) => {
temp_file_id,
bytes,
filepath,
filename: filename ?? sanitizeFilename(file.originalname),
filename: filename ?? file.originalname,
context: isAssistantUpload ? FileContext.assistants : FileContext.message_attachment,
model: isAssistantUpload ? req.body.model : undefined,
type: file.mimetype,
@@ -501,7 +476,7 @@ const processFileUpload = async ({ req, res, metadata }) => {
*/
const processAgentFileUpload = async ({ req, res, metadata }) => {
const { file } = req;
const { agent_id, tool_resource, file_id, temp_file_id = null } = metadata;
const { agent_id, tool_resource } = metadata;
if (agent_id && !tool_resource) {
throw new Error('No tool resource provided for agent file upload');
}
@@ -545,7 +520,6 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
if (!isFileSearchEnabled) {
throw new Error('File search is not enabled for Agents');
}
// Note: File search processing continues to dual storage logic below
} else if (tool_resource === EToolResources.ocr) {
const isOCREnabled = await checkCapability(req, AgentCapabilities.ocr);
if (!isOCREnabled) {
@@ -555,13 +529,13 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
const { handleFileUpload: uploadOCR } = getStrategyFunctions(
req.app.locals?.ocr?.strategy ?? FileSources.mistral_ocr,
);
const { file_id, temp_file_id = null } = metadata;
const { file_id, temp_file_id } = metadata;
const {
text,
bytes,
// TODO: OCR images support?
images: _i,
images,
filename,
filepath: ocrFileURL,
} = await uploadOCR({ req, file, loadAuthValues });
@@ -594,54 +568,28 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
.json({ message: 'Agent file uploaded and processed successfully', ...result });
}
// Dual storage pattern for RAG files: Storage + Vector DB
let storageResult, embeddingResult;
const isImageFile = file.mimetype.startsWith('image');
const source = getFileStrategy(req.app.locals, { isImage: isImageFile });
if (tool_resource === EToolResources.file_search) {
// FIRST: Upload to Storage for permanent backup (S3/local/etc.)
const { handleFileUpload } = getStrategyFunctions(source);
const sanitizedUploadFn = createSanitizedUploadWrapper(handleFileUpload);
storageResult = await sanitizedUploadFn({
req,
file,
file_id,
entity_id,
basePath,
});
// SECOND: Upload to Vector DB
const { uploadVectors } = require('./VectorDB/crud');
embeddingResult = await uploadVectors({
req,
file,
file_id,
entity_id,
});
// Vector status will be stored at root level, no need for metadata
fileInfoMetadata = {};
} else {
// Standard single storage for non-RAG files
const { handleFileUpload } = getStrategyFunctions(source);
const sanitizedUploadFn = createSanitizedUploadWrapper(handleFileUpload);
storageResult = await sanitizedUploadFn({
req,
file,
file_id,
entity_id,
basePath,
});
}
const { bytes, filename, filepath: _filepath, height, width } = storageResult;
// For RAG files, use embedding result; for others, use storage result
const embedded =
const source =
tool_resource === EToolResources.file_search
? embeddingResult?.embedded
: storageResult.embedded;
? FileSources.vectordb
: req.app.locals.fileStrategy;
const { handleFileUpload } = getStrategyFunctions(source);
const { file_id, temp_file_id } = metadata;
const {
bytes,
filename,
filepath: _filepath,
embedded,
height,
width,
} = await handleFileUpload({
req,
file,
file_id,
entity_id,
basePath,
});
let filepath = _filepath;
@@ -670,7 +618,7 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
temp_file_id,
bytes,
filepath,
filename: filename ?? sanitizeFilename(file.originalname),
filename: filename ?? file.originalname,
context: messageAttachment ? FileContext.message_attachment : FileContext.agents,
model: messageAttachment ? undefined : req.body.model,
metadata: fileInfoMetadata,
@@ -682,7 +630,6 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
});
const result = await createFile(fileInfo, true);
res.status(200).json({ message: 'Agent file uploaded and processed successfully', ...result });
};
@@ -753,21 +700,31 @@ const processOpenAIImageOutput = async ({ req, buffer, file_id, filename, fileEx
const currentDate = new Date();
const formattedDate = currentDate.toISOString();
const _file = await convertImage(req, buffer, undefined, `${file_id}${fileExt}`);
// Create only one file record with the correct information
const file = {
..._file,
usage: 1,
user: req.user.id,
type: mime.getType(fileExt),
type: `image/${req.app.locals.imageOutputType}`,
createdAt: formattedDate,
updatedAt: formattedDate,
source: getFileStrategy(req.app.locals, { isImage: true }),
source: req.app.locals.fileStrategy,
context: FileContext.assistants_output,
file_id,
filename,
file_id: `${file_id}${hostImageIdSuffix}`,
filename: `${hostImageNamePrefix}${filename}`,
};
createFile(file, true);
const source =
req.body.endpoint === EModelEndpoint.azureAssistants ? FileSources.azure : FileSources.openai;
createFile(
{
...file,
file_id,
filename,
source,
type: mime.getType(fileExt),
},
true,
);
return file;
};
@@ -903,7 +860,7 @@ async function saveBase64Image(
}
const image = await resizeImageBuffer(inputBuffer, effectiveResolution, endpoint);
const source = getFileStrategy(req.app.locals, { isImage: true });
const source = req.app.locals.fileStrategy;
const { saveBuffer } = getStrategyFunctions(source);
const filepath = await saveBuffer({
userId: req.user.id,

View File

@@ -17,21 +17,6 @@ jest.mock('~/config', () => ({
jest.mock('librechat-data-provider', () => ({
isUUID: { parse: jest.fn() },
megabyte: 1024 * 1024,
PrincipalType: {
USER: 'user',
GROUP: 'group',
PUBLIC: 'public',
},
PrincipalModel: {
USER: 'User',
GROUP: 'Group',
},
ResourceType: {
AGENT: 'agent',
PROJECT: 'project',
FILE: 'file',
PROMPTGROUP: 'promptGroup',
},
FileContext: { message_attachment: 'message_attachment' },
FileSources: { local: 'local' },
EModelEndpoint: { assistants: 'assistants' },
@@ -39,26 +24,6 @@ jest.mock('librechat-data-provider', () => ({
mergeFileConfig: jest.fn(),
removeNullishValues: jest.fn((obj) => obj),
isAssistantsEndpoint: jest.fn(),
Constants: { COMMANDS_MAX_LENGTH: 56 },
PermissionTypes: {
BOOKMARKS: 'BOOKMARKS',
PROMPTS: 'PROMPTS',
MEMORIES: 'MEMORIES',
MULTI_CONVO: 'MULTI_CONVO',
AGENTS: 'AGENTS',
TEMPORARY_CHAT: 'TEMPORARY_CHAT',
RUN_CODE: 'RUN_CODE',
WEB_SEARCH: 'WEB_SEARCH',
FILE_CITATIONS: 'FILE_CITATIONS',
},
Permissions: {
USE: 'USE',
OPT_OUT: 'OPT_OUT',
},
SystemRoles: {
USER: 'USER',
ADMIN: 'ADMIN',
},
}));
jest.mock('~/server/services/Files/images', () => ({

View File

@@ -270,12 +270,8 @@ const getStrategyFunctions = (fileSource) => {
return azureMistralOCRStrategy();
} else if (fileSource === FileSources.vertexai_mistral_ocr) {
return vertexMistralOCRStrategy();
} else if (fileSource === FileSources.text) {
return localStrategy(); // Text files use local strategy
} else {
throw new Error(
`Invalid file source: ${fileSource}. Available sources: ${Object.values(FileSources).join(', ')}`,
);
throw new Error('Invalid file source');
}
};

View File

@@ -1,525 +0,0 @@
const client = require('openid-client');
const { isEnabled } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { CacheKeys } = require('librechat-data-provider');
const { Client } = require('@microsoft/microsoft-graph-client');
const { getOpenIdConfig } = require('~/strategies/openidStrategy');
const getLogStores = require('~/cache/getLogStores');
/**
* @import { TPrincipalSearchResult, TGraphPerson, TGraphUser, TGraphGroup, TGraphPeopleResponse, TGraphUsersResponse, TGraphGroupsResponse } from 'librechat-data-provider'
*/
/**
* Checks if Entra ID principal search feature is enabled based on environment variables and user authentication
* @param {Object} user - User object from request
* @param {string} user.provider - Authentication provider
* @param {string} user.openidId - OpenID subject identifier
* @returns {boolean} True if Entra ID principal search is enabled and user is authenticated via OpenID
*/
const entraIdPrincipalFeatureEnabled = (user) => {
return (
isEnabled(process.env.USE_ENTRA_ID_FOR_PEOPLE_SEARCH) &&
isEnabled(process.env.OPENID_REUSE_TOKENS) &&
user?.provider === 'openid' &&
user?.openidId
);
};
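// Editor's note: hedged sketch of the gating above, using a stand-in user object.
const exampleUser = { provider: 'openid', openidId: 'sub-123' }; // invented values
// With USE_ENTRA_ID_FOR_PEOPLE_SEARCH=true and OPENID_REUSE_TOKENS=true in the environment,
// entraIdPrincipalFeatureEnabled(exampleUser) is truthy; if any of the three conditions is
// missing it is falsy and people search stays on the local database only.
const entraSearchEnabled = entraIdPrincipalFeatureEnabled(exampleUser);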
/**
* Creates a Microsoft Graph client with on-behalf-of token exchange
* @param {string} accessToken - OpenID Connect access token from user
* @param {string} sub - Subject identifier from token claims
* @returns {Promise<Client>} Authenticated Graph API client
*/
const createGraphClient = async (accessToken, sub) => {
try {
// Reason: Use existing OpenID configuration and token exchange pattern from openidStrategy.js
const openidConfig = getOpenIdConfig();
const exchangedToken = await exchangeTokenForGraphAccess(openidConfig, accessToken, sub);
const graphClient = Client.init({
authProvider: (done) => {
done(null, exchangedToken);
},
});
return graphClient;
} catch (error) {
logger.error('[createGraphClient] Error creating Graph client:', error);
throw error;
}
};
/**
* Exchange OpenID token for Graph API access using on-behalf-of flow
* Similar to exchangeAccessTokenIfNeeded in openidStrategy.js but for Graph scopes
* @param {Configuration} config - OpenID configuration
* @param {string} accessToken - Original access token
* @param {string} sub - Subject identifier
* @returns {Promise<string>} Graph API access token
*/
const exchangeTokenForGraphAccess = async (config, accessToken, sub) => {
try {
const tokensCache = getLogStores(CacheKeys.OPENID_EXCHANGED_TOKENS);
const cacheKey = `${sub}:graph`;
const cachedToken = await tokensCache.get(cacheKey);
if (cachedToken) {
return cachedToken.access_token;
}
const graphScopes = process.env.OPENID_GRAPH_SCOPES || 'User.Read,People.Read,Group.Read.All';
const scopeString = graphScopes
.split(',')
.map((scope) => `https://graph.microsoft.com/${scope}`)
.join(' ');
const grantResponse = await client.genericGrantRequest(
config,
'urn:ietf:params:oauth:grant-type:jwt-bearer',
{
scope: scopeString,
assertion: accessToken,
requested_token_use: 'on_behalf_of',
},
);
await tokensCache.set(
cacheKey,
{
access_token: grantResponse.access_token,
},
grantResponse.expires_in * 1000,
);
return grantResponse.access_token;
} catch (error) {
logger.error('[exchangeTokenForGraphAccess] Token exchange failed:', error);
throw error;
}
};
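// Editor's note: illustration of the scope mapping performed above before the on-behalf-of grant.
const exampleScopeString = 'User.Read,People.Read,Group.Read.All'
  .split(',')
  .map((scope) => `https://graph.microsoft.com/${scope}`)
  .join(' ');
// -> 'https://graph.microsoft.com/User.Read https://graph.microsoft.com/People.Read https://graph.microsoft.com/Group.Read.All'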
/**
* Search for principals (people and groups) using Microsoft Graph API
* Uses searchContacts first, then searchUsers and searchGroups to fill remaining slots
* @param {string} accessToken - OpenID Connect access token
* @param {string} sub - Subject identifier
* @param {string} query - Search query string
* @param {string} type - Type filter ('users', 'groups', or 'all')
* @param {number} limit - Maximum number of results
* @returns {Promise<TPrincipalSearchResult[]>} Array of principal search results
*/
const searchEntraIdPrincipals = async (accessToken, sub, query, type = 'all', limit = 10) => {
try {
if (!query || query.trim().length < 2) {
return [];
}
const graphClient = await createGraphClient(accessToken, sub);
let allResults = [];
if (type === 'users' || type === 'all') {
const contactResults = await searchContacts(graphClient, query, limit);
allResults.push(...contactResults);
}
if (allResults.length >= limit) {
return allResults.slice(0, limit);
}
if (type === 'users') {
const userResults = await searchUsers(graphClient, query, limit);
allResults.push(...userResults);
} else if (type === 'groups') {
const groupResults = await searchGroups(graphClient, query, limit);
allResults.push(...groupResults);
} else if (type === 'all') {
const [userResults, groupResults] = await Promise.all([
searchUsers(graphClient, query, limit),
searchGroups(graphClient, query, limit),
]);
allResults.push(...userResults, ...groupResults);
}
const seenIds = new Set();
const uniqueResults = allResults.filter((result) => {
if (seenIds.has(result.idOnTheSource)) {
return false;
}
seenIds.add(result.idOnTheSource);
return true;
});
return uniqueResults.slice(0, limit);
} catch (error) {
logger.error('[searchEntraIdPrincipals] Error searching principals:', error);
return [];
}
};
/**
* Get current user's Entra ID group memberships from Microsoft Graph
* Uses /me/memberOf endpoint to get groups the user is a member of
* @param {string} accessToken - OpenID Connect access token
* @param {string} sub - Subject identifier
* @returns {Promise<Array<string>>} Array of group ID strings (GUIDs)
*/
const getUserEntraGroups = async (accessToken, sub) => {
try {
const graphClient = await createGraphClient(accessToken, sub);
const groupsResponse = await graphClient.api('/me/memberOf').select('id').get();
return (groupsResponse.value || []).map((group) => group.id);
} catch (error) {
logger.error('[getUserEntraGroups] Error fetching user groups:', error);
return [];
}
};
/**
* Get current user's owned Entra ID groups from Microsoft Graph
* Uses /me/ownedObjects/microsoft.graph.group endpoint to get groups the user owns
* @param {string} accessToken - OpenID Connect access token
* @param {string} sub - Subject identifier
* @returns {Promise<Array<string>>} Array of group ID strings (GUIDs)
*/
const getUserOwnedEntraGroups = async (accessToken, sub) => {
try {
const graphClient = await createGraphClient(accessToken, sub);
const groupsResponse = await graphClient
.api('/me/ownedObjects/microsoft.graph.group')
.select('id')
.get();
return (groupsResponse.value || []).map((group) => group.id);
} catch (error) {
logger.error('[getUserOwnedEntraGroups] Error fetching user owned groups:', error);
return [];
}
};
/**
* Get group members from Microsoft Graph API
* Iteratively fetches all members by following pagination links (@odata.nextLink)
* @param {string} accessToken - OpenID Connect access token
* @param {string} sub - Subject identifier
* @param {string} groupId - Entra ID group object ID
* @returns {Promise<Array>} Array of member IDs (idOnTheSource values)
*/
const getGroupMembers = async (accessToken, sub, groupId) => {
try {
const graphClient = await createGraphClient(accessToken, sub);
const allMembers = [];
let nextLink = `/groups/${groupId}/members`;
while (nextLink) {
const membersResponse = await graphClient.api(nextLink).select('id').top(999).get();
const members = membersResponse.value || [];
allMembers.push(...members.map((member) => member.id));
nextLink = membersResponse['@odata.nextLink']
? membersResponse['@odata.nextLink'].split('/v1.0')[1]
: null;
}
return allMembers;
} catch (error) {
logger.error('[getGroupMembers] Error fetching group members:', error);
return [];
}
};
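// Editor's note: illustration of how each pagination link is rebased for the Graph client
// (the URL below is an invented example).
const nextLinkExample =
  'https://graph.microsoft.com/v1.0/groups/123/members?$select=id&$skiptoken=abc';
console.log(nextLinkExample.split('/v1.0')[1]);
// -> '/groups/123/members?$select=id&$skiptoken=abc', a relative path that graphClient.api() accepts.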
/**
* Get group owners from Microsoft Graph API
* Iteratively fetches all owners by following pagination links (@odata.nextLink)
* @param {string} accessToken - OpenID Connect access token
* @param {string} sub - Subject identifier
* @param {string} groupId - Entra ID group object ID
* @returns {Promise<Array>} Array of owner IDs (idOnTheSource values)
*/
const getGroupOwners = async (accessToken, sub, groupId) => {
try {
const graphClient = await createGraphClient(accessToken, sub);
const allOwners = [];
let nextLink = `/groups/${groupId}/owners`;
while (nextLink) {
const ownersResponse = await graphClient.api(nextLink).select('id').top(999).get();
const owners = ownersResponse.value || [];
allOwners.push(...owners.map((member) => member.id));
nextLink = ownersResponse['@odata.nextLink']
? ownersResponse['@odata.nextLink'].split('/v1.0')[1]
: null;
}
return allOwners;
} catch (error) {
logger.error('[getGroupOwners] Error fetching group owners:', error);
return [];
}
};
/**
* Search for contacts (users only) using Microsoft Graph /me/people endpoint
* Returns mapped TPrincipalSearchResult objects for users only
* @param {Client} graphClient - Authenticated Microsoft Graph client
* @param {string} query - Search query string
* @param {number} limit - Maximum number of results (default: 10)
* @returns {Promise<TPrincipalSearchResult[]>} Array of mapped user contact results
*/
const searchContacts = async (graphClient, query, limit = 10) => {
try {
if (!query || query.trim().length < 2) {
return [];
}
if (
process.env.OPENID_GRAPH_SCOPES &&
!process.env.OPENID_GRAPH_SCOPES.toLowerCase().includes('people.read')
) {
logger.warn('[searchContacts] People.Read scope is not enabled, skipping contact search');
return [];
}
// Reason: Search only for OrganizationUser (person) type, not groups
const filter = "personType/subclass eq 'OrganizationUser'";
let apiCall = graphClient
.api('/me/people')
.search(`"${query}"`)
.select(
'id,displayName,givenName,surname,userPrincipalName,jobTitle,department,companyName,scoredEmailAddresses,personType,phones',
)
.header('ConsistencyLevel', 'eventual')
.filter(filter)
.top(limit);
const contactsResponse = await apiCall.get();
return (contactsResponse.value || []).map(mapContactToTPrincipalSearchResult);
} catch (error) {
logger.error('[searchContacts] Error searching contacts:', error);
return [];
}
};
/**
* Search for users using Microsoft Graph /users endpoint
* Returns mapped TPrincipalSearchResult objects
* @param {Client} graphClient - Authenticated Microsoft Graph client
* @param {string} query - Search query string
* @param {number} limit - Maximum number of results (default: 10)
* @returns {Promise<TPrincipalSearchResult[]>} Array of mapped user results
*/
const searchUsers = async (graphClient, query, limit = 10) => {
try {
if (!query || query.trim().length < 2) {
return [];
}
// Reason: Search users by display name, email, and user principal name
const usersResponse = await graphClient
.api('/users')
.search(
`"displayName:${query}" OR "userPrincipalName:${query}" OR "mail:${query}" OR "givenName:${query}" OR "surname:${query}"`,
)
.select(
'id,displayName,givenName,surname,userPrincipalName,jobTitle,department,companyName,mail,phones',
)
.header('ConsistencyLevel', 'eventual')
.top(limit)
.get();
return (usersResponse.value || []).map(mapUserToTPrincipalSearchResult);
} catch (error) {
logger.error('[searchUsers] Error searching users:', error);
return [];
}
};
/**
* Search for groups using Microsoft Graph /groups endpoint
* Returns mapped TPrincipalSearchResult objects, includes all group types
* @param {Client} graphClient - Authenticated Microsoft Graph client
* @param {string} query - Search query string
* @param {number} limit - Maximum number of results (default: 10)
* @returns {Promise<TPrincipalSearchResult[]>} Array of mapped group results
*/
const searchGroups = async (graphClient, query, limit = 10) => {
try {
if (!query || query.trim().length < 2) {
return [];
}
// Reason: Search all groups by display name and email without filtering group types
const groupsResponse = await graphClient
.api('/groups')
.search(`"displayName:${query}" OR "mail:${query}" OR "mailNickname:${query}"`)
.select('id,displayName,mail,mailNickname,description,groupTypes,resourceProvisioningOptions')
.header('ConsistencyLevel', 'eventual')
.top(limit)
.get();
return (groupsResponse.value || []).map(mapGroupToTPrincipalSearchResult);
} catch (error) {
logger.error('[searchGroups] Error searching groups:', error);
return [];
}
};
/**
* Test Graph API connectivity and permissions
* @param {string} accessToken - OpenID Connect access token
* @param {string} sub - Subject identifier
* @returns {Promise<Object>} Test results with available permissions
*/
const testGraphApiAccess = async (accessToken, sub) => {
try {
const graphClient = await createGraphClient(accessToken, sub);
const results = {
userAccess: false,
peopleAccess: false,
groupsAccess: false,
usersEndpointAccess: false,
groupsEndpointAccess: false,
errors: [],
};
// Test User.Read permission
try {
await graphClient.api('/me').select('id,displayName').get();
results.userAccess = true;
} catch (error) {
results.errors.push(`User.Read: ${error.message}`);
}
// Test People.Read permission with OrganizationUser filter
try {
await graphClient
.api('/me/people')
.filter("personType/subclass eq 'OrganizationUser'")
.top(1)
.get();
results.peopleAccess = true;
} catch (error) {
results.errors.push(`People.Read (OrganizationUser): ${error.message}`);
}
// Test People.Read permission with UnifiedGroup filter
try {
await graphClient
.api('/me/people')
.filter("personType/subclass eq 'UnifiedGroup'")
.top(1)
.get();
results.groupsAccess = true;
} catch (error) {
results.errors.push(`People.Read (UnifiedGroup): ${error.message}`);
}
// Test /users endpoint access (requires User.Read.All or similar)
try {
await graphClient
.api('/users')
.search('"displayName:test"')
.select('id,displayName,userPrincipalName')
.top(1)
.get();
results.usersEndpointAccess = true;
} catch (error) {
results.errors.push(`Users endpoint: ${error.message}`);
}
// Test /groups endpoint access (requires Group.Read.All or similar)
try {
await graphClient
.api('/groups')
.search('"displayName:test"')
.select('id,displayName,mail')
.top(1)
.get();
results.groupsEndpointAccess = true;
} catch (error) {
results.errors.push(`Groups endpoint: ${error.message}`);
}
return results;
} catch (error) {
logger.error('[testGraphApiAccess] Error testing Graph API access:', error);
return {
userAccess: false,
peopleAccess: false,
groupsAccess: false,
usersEndpointAccess: false,
groupsEndpointAccess: false,
errors: [error.message],
};
}
};
/**
* Map Graph API user object to TPrincipalSearchResult format
* @param {TGraphUser} user - Raw user object from Graph API
* @returns {TPrincipalSearchResult} Mapped user result
*/
const mapUserToTPrincipalSearchResult = (user) => {
return {
id: null,
type: 'user',
name: user.displayName,
email: user.mail || user.userPrincipalName,
username: user.userPrincipalName,
source: 'entra',
idOnTheSource: user.id,
};
};
/**
* Map Graph API group object to TPrincipalSearchResult format
* @param {TGraphGroup} group - Raw group object from Graph API
* @returns {TPrincipalSearchResult} Mapped group result
*/
const mapGroupToTPrincipalSearchResult = (group) => {
return {
id: null,
type: 'group',
name: group.displayName,
email: group.mail || group.userPrincipalName,
description: group.description,
source: 'entra',
idOnTheSource: group.id,
};
};
/**
* Map Graph API /me/people contact object to TPrincipalSearchResult format
* Handles both user and group contacts from the people endpoint
* @param {TGraphPerson} contact - Raw contact object from Graph API /me/people
* @returns {TPrincipalSearchResult} Mapped contact result
*/
const mapContactToTPrincipalSearchResult = (contact) => {
const isGroup = contact.personType?.class === 'Group';
const primaryEmail = contact.scoredEmailAddresses?.[0]?.address;
return {
id: null,
type: isGroup ? 'group' : 'user',
name: contact.displayName,
email: primaryEmail,
username: !isGroup ? contact.userPrincipalName : undefined,
source: 'entra',
idOnTheSource: contact.id,
};
};
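/**
 * Worked example (illustrative values only): a /me/people contact such as
 *   { id: 'aad-123', displayName: 'Jane Doe', userPrincipalName: 'jane@contoso.com',
 *     personType: { class: 'Person', subclass: 'OrganizationUser' },
 *     scoredEmailAddresses: [{ address: 'jane@contoso.com' }] }
 * maps to:
 *   { id: null, type: 'user', name: 'Jane Doe', email: 'jane@contoso.com',
 *     username: 'jane@contoso.com', source: 'entra', idOnTheSource: 'aad-123' }
 * while a contact with personType.class === 'Group' maps to type 'group' with
 * `username` left undefined.
 */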
module.exports = {
getGroupMembers,
getGroupOwners,
createGraphClient,
getUserEntraGroups,
getUserOwnedEntraGroups,
testGraphApiAccess,
searchEntraIdPrincipals,
exchangeTokenForGraphAccess,
entraIdPrincipalFeatureEnabled,
};
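/**
 * Illustrative consumer sketch (not part of the original file): how a route
 * handler might call the exported search. The argument order
 * (accessToken, sub, query, type, limit) follows the accompanying spec file;
 * the Express `req` shape used here is an assumption for the example.
 */
const searchPeoplePickerExample = async (req, query) => {
  const token = req.headers.authorization?.split(' ')[1];
  return searchEntraIdPrincipals(token, req.user.openidId, query, 'all', 10);
};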

View File

@@ -1,720 +0,0 @@
jest.mock('@microsoft/microsoft-graph-client');
jest.mock('~/strategies/openidStrategy');
jest.mock('~/cache/getLogStores');
jest.mock('@librechat/data-schemas', () => ({
...jest.requireActual('@librechat/data-schemas'),
logger: {
error: jest.fn(),
debug: jest.fn(),
},
}));
jest.mock('~/config', () => ({
logger: {
error: jest.fn(),
debug: jest.fn(),
},
createAxiosInstance: jest.fn(() => ({
create: jest.fn(),
defaults: {},
})),
}));
jest.mock('~/utils', () => ({
logAxiosError: jest.fn(),
}));
jest.mock('~/server/services/Config', () => ({}));
jest.mock('~/server/services/Files/strategies', () => ({
getStrategyFunctions: jest.fn(),
}));
const mongoose = require('mongoose');
const client = require('openid-client');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { Client } = require('@microsoft/microsoft-graph-client');
const { getOpenIdConfig } = require('~/strategies/openidStrategy');
const getLogStores = require('~/cache/getLogStores');
const GraphApiService = require('./GraphApiService');
describe('GraphApiService', () => {
let mongoServer;
let mockGraphClient;
let mockTokensCache;
let mockOpenIdConfig;
beforeAll(async () => {
mongoServer = await MongoMemoryServer.create();
const mongoUri = mongoServer.getUri();
await mongoose.connect(mongoUri);
});
afterAll(async () => {
await mongoose.disconnect();
await mongoServer.stop();
});
afterEach(() => {
// Clean up environment variables
delete process.env.OPENID_GRAPH_SCOPES;
});
beforeEach(async () => {
jest.clearAllMocks();
await mongoose.connection.dropDatabase();
// Set up environment variable for People.Read scope
process.env.OPENID_GRAPH_SCOPES = 'User.Read,People.Read,Group.Read.All';
// Mock Graph client
mockGraphClient = {
api: jest.fn().mockReturnThis(),
search: jest.fn().mockReturnThis(),
filter: jest.fn().mockReturnThis(),
select: jest.fn().mockReturnThis(),
header: jest.fn().mockReturnThis(),
top: jest.fn().mockReturnThis(),
get: jest.fn(),
};
Client.init.mockReturnValue(mockGraphClient);
// Mock tokens cache
mockTokensCache = {
get: jest.fn(),
set: jest.fn(),
};
getLogStores.mockReturnValue(mockTokensCache);
// Mock OpenID config
mockOpenIdConfig = {
client_id: 'test-client-id',
issuer: 'https://test-issuer.com',
};
getOpenIdConfig.mockReturnValue(mockOpenIdConfig);
// Mock openid-client (using the existing jest mock configuration)
if (client.genericGrantRequest) {
client.genericGrantRequest.mockResolvedValue({
access_token: 'mocked-graph-token',
expires_in: 3600,
});
}
});
describe('Dependency Contract Tests', () => {
it('should fail if getOpenIdConfig interface changes', () => {
// Reason: Ensure getOpenIdConfig returns expected structure
const config = getOpenIdConfig();
expect(config).toBeDefined();
expect(typeof config).toBe('object');
// Add specific property checks that GraphApiService depends on
expect(config).toHaveProperty('client_id');
expect(config).toHaveProperty('issuer');
// Ensure the function is callable
expect(typeof getOpenIdConfig).toBe('function');
});
it('should fail if openid-client.genericGrantRequest interface changes', () => {
// Reason: Ensure client.genericGrantRequest maintains expected signature
if (client.genericGrantRequest) {
expect(typeof client.genericGrantRequest).toBe('function');
// Test that it accepts the expected parameters
const mockCall = client.genericGrantRequest(
mockOpenIdConfig,
'urn:ietf:params:oauth:grant-type:jwt-bearer',
{
scope: 'test-scope',
assertion: 'test-token',
requested_token_use: 'on_behalf_of',
},
);
expect(mockCall).toBeDefined();
}
});
it('should fail if Microsoft Graph Client interface changes', () => {
// Reason: Ensure Graph Client maintains expected fluent API
expect(typeof Client.init).toBe('function');
const client = Client.init({ authProvider: jest.fn() });
expect(client).toHaveProperty('api');
expect(typeof client.api).toBe('function');
});
});
describe('createGraphClient', () => {
it('should create graph client with exchanged token', async () => {
const accessToken = 'test-access-token';
const sub = 'test-user-id';
const result = await GraphApiService.createGraphClient(accessToken, sub);
expect(getOpenIdConfig).toHaveBeenCalled();
expect(Client.init).toHaveBeenCalledWith({
authProvider: expect.any(Function),
});
expect(result).toBe(mockGraphClient);
});
it('should handle token exchange errors gracefully', async () => {
if (client.genericGrantRequest) {
client.genericGrantRequest.mockRejectedValue(new Error('Token exchange failed'));
}
await expect(GraphApiService.createGraphClient('invalid-token', 'test-user')).rejects.toThrow(
'Token exchange failed',
);
});
});
describe('exchangeTokenForGraphAccess', () => {
it('should return cached token if available', async () => {
const cachedToken = { access_token: 'cached-token' };
mockTokensCache.get.mockResolvedValue(cachedToken);
const result = await GraphApiService.exchangeTokenForGraphAccess(
mockOpenIdConfig,
'test-token',
'test-user',
);
expect(result).toBe('cached-token');
expect(mockTokensCache.get).toHaveBeenCalledWith('test-user:graph');
if (client.genericGrantRequest) {
expect(client.genericGrantRequest).not.toHaveBeenCalled();
}
});
it('should exchange token and cache result', async () => {
mockTokensCache.get.mockResolvedValue(null);
const result = await GraphApiService.exchangeTokenForGraphAccess(
mockOpenIdConfig,
'test-token',
'test-user',
);
if (client.genericGrantRequest) {
expect(client.genericGrantRequest).toHaveBeenCalledWith(
mockOpenIdConfig,
'urn:ietf:params:oauth:grant-type:jwt-bearer',
{
scope:
'https://graph.microsoft.com/User.Read https://graph.microsoft.com/People.Read https://graph.microsoft.com/Group.Read.All',
assertion: 'test-token',
requested_token_use: 'on_behalf_of',
},
);
}
expect(mockTokensCache.set).toHaveBeenCalledWith(
'test-user:graph',
{ access_token: 'mocked-graph-token' },
3600000,
);
expect(result).toBe('mocked-graph-token');
});
it('should use custom scopes from environment', async () => {
const originalEnv = process.env.OPENID_GRAPH_SCOPES;
process.env.OPENID_GRAPH_SCOPES = 'Custom.Read,Custom.Write';
mockTokensCache.get.mockResolvedValue(null);
await GraphApiService.exchangeTokenForGraphAccess(
mockOpenIdConfig,
'test-token',
'test-user',
);
if (client.genericGrantRequest) {
expect(client.genericGrantRequest).toHaveBeenCalledWith(
mockOpenIdConfig,
'urn:ietf:params:oauth:grant-type:jwt-bearer',
{
scope:
'https://graph.microsoft.com/Custom.Read https://graph.microsoft.com/Custom.Write',
assertion: 'test-token',
requested_token_use: 'on_behalf_of',
},
);
}
process.env.OPENID_GRAPH_SCOPES = originalEnv;
});
});
describe('searchEntraIdPrincipals', () => {
// Mock data used by multiple tests
const mockContactsResponse = {
value: [
{
id: 'contact-user-1',
displayName: 'John Doe',
userPrincipalName: 'john@company.com',
mail: 'john@company.com',
personType: { class: 'Person', subclass: 'OrganizationUser' },
scoredEmailAddresses: [{ address: 'john@company.com', relevanceScore: 0.9 }],
},
{
id: 'contact-group-1',
displayName: 'Marketing Team',
mail: 'marketing@company.com',
personType: { class: 'Group', subclass: 'UnifiedGroup' },
scoredEmailAddresses: [{ address: 'marketing@company.com', relevanceScore: 0.8 }],
},
],
};
const mockUsersResponse = {
value: [
{
id: 'dir-user-1',
displayName: 'Jane Smith',
userPrincipalName: 'jane@company.com',
mail: 'jane@company.com',
},
],
};
const mockGroupsResponse = {
value: [
{
id: 'dir-group-1',
displayName: 'Development Team',
mail: 'dev@company.com',
},
],
};
beforeEach(() => {
// Reset mock call history for each test
jest.clearAllMocks();
// Re-apply the Client.init mock after clearAllMocks
Client.init.mockReturnValue(mockGraphClient);
// Re-apply openid-client mock
if (client.genericGrantRequest) {
client.genericGrantRequest.mockResolvedValue({
access_token: 'mocked-graph-token',
expires_in: 3600,
});
}
// Re-apply cache mock
mockTokensCache.get.mockResolvedValue(null); // Force token exchange
mockTokensCache.set.mockResolvedValue();
getLogStores.mockReturnValue(mockTokensCache);
getOpenIdConfig.mockReturnValue(mockOpenIdConfig);
});
it('should return empty results for short queries', async () => {
const result = await GraphApiService.searchEntraIdPrincipals('token', 'user', 'a', 'all', 10);
expect(result).toEqual([]);
expect(mockGraphClient.api).not.toHaveBeenCalled();
});
it('should search contacts first and additional users for users type', async () => {
// Mock responses for this specific test
const contactsFilteredResponse = {
value: [
{
id: 'contact-user-1',
displayName: 'John Doe',
userPrincipalName: 'john@company.com',
mail: 'john@company.com',
personType: { class: 'Person', subclass: 'OrganizationUser' },
scoredEmailAddresses: [{ address: 'john@company.com', relevanceScore: 0.9 }],
},
],
};
mockGraphClient.get
.mockResolvedValueOnce(contactsFilteredResponse) // contacts call
.mockResolvedValueOnce(mockUsersResponse); // users call
const result = await GraphApiService.searchEntraIdPrincipals(
'token',
'user',
'john',
'users',
10,
);
// Should call contacts first with user filter
expect(mockGraphClient.api).toHaveBeenCalledWith('/me/people');
expect(mockGraphClient.search).toHaveBeenCalledWith('"john"');
expect(mockGraphClient.filter).toHaveBeenCalledWith(
"personType/subclass eq 'OrganizationUser'",
);
// Should call users endpoint for additional results
expect(mockGraphClient.api).toHaveBeenCalledWith('/users');
expect(mockGraphClient.search).toHaveBeenCalledWith(
'"displayName:john" OR "userPrincipalName:john" OR "mail:john" OR "givenName:john" OR "surname:john"',
);
// Should return TPrincipalSearchResult array
expect(Array.isArray(result)).toBe(true);
expect(result).toHaveLength(2); // 1 from contacts + 1 from users
expect(result[0]).toMatchObject({
id: null,
type: 'user',
name: 'John Doe',
email: 'john@company.com',
source: 'entra',
idOnTheSource: 'contact-user-1',
});
});
it('should search groups endpoint only for groups type', async () => {
// Mock responses for this specific test - only groups endpoint called
mockGraphClient.get.mockResolvedValueOnce(mockGroupsResponse); // only groups call
const result = await GraphApiService.searchEntraIdPrincipals(
'token',
'user',
'team',
'groups',
10,
);
// Should NOT call contacts for groups type
expect(mockGraphClient.api).not.toHaveBeenCalledWith('/me/people');
// Should call groups endpoint only
expect(mockGraphClient.api).toHaveBeenCalledWith('/groups');
expect(mockGraphClient.search).toHaveBeenCalledWith(
'"displayName:team" OR "mail:team" OR "mailNickname:team"',
);
expect(Array.isArray(result)).toBe(true);
expect(result).toHaveLength(1); // 1 from groups only
});
it('should search all endpoints for all type', async () => {
// Mock responses for this specific test
mockGraphClient.get
.mockResolvedValueOnce(mockContactsResponse) // contacts call (both user and group)
.mockResolvedValueOnce(mockUsersResponse) // users call
.mockResolvedValueOnce(mockGroupsResponse); // groups call
const result = await GraphApiService.searchEntraIdPrincipals(
'token',
'user',
'test',
'all',
10,
);
// Should call contacts first with user filter
expect(mockGraphClient.api).toHaveBeenCalledWith('/me/people');
expect(mockGraphClient.search).toHaveBeenCalledWith('"test"');
expect(mockGraphClient.filter).toHaveBeenCalledWith(
"personType/subclass eq 'OrganizationUser'",
);
// Should call both users and groups endpoints
expect(mockGraphClient.api).toHaveBeenCalledWith('/users');
expect(mockGraphClient.api).toHaveBeenCalledWith('/groups');
expect(Array.isArray(result)).toBe(true);
expect(result).toHaveLength(4); // 2 from contacts + 1 from users + 1 from groups
});
it('should early exit if contacts reach limit', async () => {
// Mock contacts to return exactly the limit
const limitedContactsResponse = {
value: Array(10).fill({
id: 'contact-1',
displayName: 'Contact User',
mail: 'contact@company.com',
personType: { class: 'Person', subclass: 'OrganizationUser' },
}),
};
mockGraphClient.get.mockResolvedValueOnce(limitedContactsResponse);
const result = await GraphApiService.searchEntraIdPrincipals(
'token',
'user',
'test',
'all',
10,
);
// Should call contacts first
expect(mockGraphClient.api).toHaveBeenCalledWith('/me/people');
expect(mockGraphClient.search).toHaveBeenCalledWith('"test"');
// Should not call users endpoint since limit was reached
expect(mockGraphClient.api).not.toHaveBeenCalledWith('/users');
expect(result).toHaveLength(10);
});
it('should deduplicate results based on idOnTheSource', async () => {
// Mock responses with duplicate IDs
const duplicateContactsResponse = {
value: [
{
id: 'duplicate-id',
displayName: 'John Doe',
mail: 'john@company.com',
personType: { class: 'Person', subclass: 'OrganizationUser' },
},
],
};
const duplicateUsersResponse = {
value: [
{
id: 'duplicate-id', // Same ID as contact
displayName: 'John Doe',
mail: 'john@company.com',
},
],
};
mockGraphClient.get
.mockResolvedValueOnce(duplicateContactsResponse)
.mockResolvedValueOnce(duplicateUsersResponse);
const result = await GraphApiService.searchEntraIdPrincipals(
'token',
'user',
'john',
'users',
10,
);
// Should only return one result despite duplicate IDs
expect(result).toHaveLength(1);
expect(result[0].idOnTheSource).toBe('duplicate-id');
});
it('should handle Graph API errors gracefully', async () => {
mockGraphClient.get.mockRejectedValue(new Error('Graph API error'));
const result = await GraphApiService.searchEntraIdPrincipals(
'token',
'user',
'test',
'all',
10,
);
expect(result).toEqual([]);
});
});
describe('getUserEntraGroups', () => {
it('should fetch user groups from memberOf endpoint', async () => {
const mockGroupsResponse = {
value: [
{
id: 'group-1',
},
{
id: 'group-2',
},
],
};
mockGraphClient.get.mockResolvedValue(mockGroupsResponse);
const result = await GraphApiService.getUserEntraGroups('token', 'user');
expect(mockGraphClient.api).toHaveBeenCalledWith('/me/memberOf');
expect(mockGraphClient.select).toHaveBeenCalledWith('id');
expect(result).toHaveLength(2);
expect(result).toEqual(['group-1', 'group-2']);
});
it('should return empty array on error', async () => {
mockGraphClient.get.mockRejectedValue(new Error('API error'));
const result = await GraphApiService.getUserEntraGroups('token', 'user');
expect(result).toEqual([]);
});
it('should handle empty response', async () => {
const mockGroupsResponse = {
value: [],
};
mockGraphClient.get.mockResolvedValue(mockGroupsResponse);
const result = await GraphApiService.getUserEntraGroups('token', 'user');
expect(result).toEqual([]);
});
it('should handle missing value property', async () => {
mockGraphClient.get.mockResolvedValue({});
const result = await GraphApiService.getUserEntraGroups('token', 'user');
expect(result).toEqual([]);
});
});
describe('testGraphApiAccess', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('should test all permissions and return success results', async () => {
// Mock successful responses for all tests
mockGraphClient.get
.mockResolvedValueOnce({ id: 'user-123', displayName: 'Test User' }) // /me test
.mockResolvedValueOnce({ value: [] }) // people OrganizationUser test
.mockResolvedValueOnce({ value: [] }) // people UnifiedGroup test
.mockResolvedValueOnce({ value: [] }) // /users endpoint test
.mockResolvedValueOnce({ value: [] }); // /groups endpoint test
const result = await GraphApiService.testGraphApiAccess('token', 'user');
expect(result).toEqual({
userAccess: true,
peopleAccess: true,
groupsAccess: true,
usersEndpointAccess: true,
groupsEndpointAccess: true,
errors: [],
});
// Verify all endpoints were tested
expect(mockGraphClient.api).toHaveBeenCalledWith('/me');
expect(mockGraphClient.api).toHaveBeenCalledWith('/me/people');
expect(mockGraphClient.api).toHaveBeenCalledWith('/users');
expect(mockGraphClient.api).toHaveBeenCalledWith('/groups');
expect(mockGraphClient.filter).toHaveBeenCalledWith(
"personType/subclass eq 'OrganizationUser'",
);
expect(mockGraphClient.filter).toHaveBeenCalledWith("personType/subclass eq 'UnifiedGroup'");
expect(mockGraphClient.search).toHaveBeenCalledWith('"displayName:test"');
});
it('should handle partial failures and record errors', async () => {
// Mock mixed success/failure responses
mockGraphClient.get
.mockResolvedValueOnce({ id: 'user-123', displayName: 'Test User' }) // /me success
.mockRejectedValueOnce(new Error('People access denied')) // people OrganizationUser fail
.mockResolvedValueOnce({ value: [] }) // people UnifiedGroup success
.mockRejectedValueOnce(new Error('Users endpoint access denied')) // /users fail
.mockResolvedValueOnce({ value: [] }); // /groups success
const result = await GraphApiService.testGraphApiAccess('token', 'user');
expect(result).toEqual({
userAccess: true,
peopleAccess: false,
groupsAccess: true,
usersEndpointAccess: false,
groupsEndpointAccess: true,
errors: [
'People.Read (OrganizationUser): People access denied',
'Users endpoint: Users endpoint access denied',
],
});
});
it('should handle complete Graph client creation failure', async () => {
// Mock token exchange failure to test error handling
if (client.genericGrantRequest) {
client.genericGrantRequest.mockRejectedValue(new Error('Token exchange failed'));
}
const result = await GraphApiService.testGraphApiAccess('invalid-token', 'user');
expect(result).toEqual({
userAccess: false,
peopleAccess: false,
groupsAccess: false,
usersEndpointAccess: false,
groupsEndpointAccess: false,
errors: ['Token exchange failed'],
});
});
it('should record all permission errors', async () => {
// Mock all requests to fail
mockGraphClient.get
.mockRejectedValueOnce(new Error('User.Read denied'))
.mockRejectedValueOnce(new Error('People.Read OrganizationUser denied'))
.mockRejectedValueOnce(new Error('People.Read UnifiedGroup denied'))
.mockRejectedValueOnce(new Error('Users directory access denied'))
.mockRejectedValueOnce(new Error('Groups directory access denied'));
const result = await GraphApiService.testGraphApiAccess('token', 'user');
expect(result).toEqual({
userAccess: false,
peopleAccess: false,
groupsAccess: false,
usersEndpointAccess: false,
groupsEndpointAccess: false,
errors: [
'User.Read: User.Read denied',
'People.Read (OrganizationUser): People.Read OrganizationUser denied',
'People.Read (UnifiedGroup): People.Read UnifiedGroup denied',
'Users endpoint: Users directory access denied',
'Groups endpoint: Groups directory access denied',
],
});
});
it('should test new endpoints with correct search patterns', async () => {
// Mock successful responses for endpoint testing
mockGraphClient.get
.mockResolvedValueOnce({ id: 'user-123', displayName: 'Test User' }) // /me
.mockResolvedValueOnce({ value: [] }) // people OrganizationUser
.mockResolvedValueOnce({ value: [] }) // people UnifiedGroup
.mockResolvedValueOnce({ value: [] }) // /users
.mockResolvedValueOnce({ value: [] }); // /groups
await GraphApiService.testGraphApiAccess('token', 'user');
// Verify /users endpoint test
expect(mockGraphClient.api).toHaveBeenCalledWith('/users');
expect(mockGraphClient.search).toHaveBeenCalledWith('"displayName:test"');
expect(mockGraphClient.select).toHaveBeenCalledWith('id,displayName,userPrincipalName');
// Verify /groups endpoint test
expect(mockGraphClient.api).toHaveBeenCalledWith('/groups');
expect(mockGraphClient.select).toHaveBeenCalledWith('id,displayName,mail');
});
it('should handle endpoint-specific permission failures', async () => {
// Mock specific endpoint failures
mockGraphClient.get
.mockResolvedValueOnce({ id: 'user-123', displayName: 'Test User' }) // /me success
.mockResolvedValueOnce({ value: [] }) // people OrganizationUser success
.mockResolvedValueOnce({ value: [] }) // people UnifiedGroup success
.mockRejectedValueOnce(new Error('Insufficient privileges')) // /users fail (User.Read.All needed)
.mockRejectedValueOnce(new Error('Access denied to groups')); // /groups fail (Group.Read.All needed)
const result = await GraphApiService.testGraphApiAccess('token', 'user');
expect(result).toEqual({
userAccess: true,
peopleAccess: true,
groupsAccess: true,
usersEndpointAccess: false,
groupsEndpointAccess: false,
errors: [
'Users endpoint: Insufficient privileges',
'Groups endpoint: Access denied to groups',
],
});
});
});
});

View File

@@ -1,86 +0,0 @@
const { getOpenIdConfig } = require('~/strategies/openidStrategy');
const { logger } = require('~/config');
const { CacheKeys } = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores');
const client = require('openid-client');
/**
* Get Microsoft Graph API token using existing token exchange mechanism
* @param {Object} user - User object with OpenID information
* @param {string} accessToken - Current access token from Authorization header
* @param {string} scopes - Graph API scopes for the token
* @param {boolean} fromCache - Whether to try getting token from cache first
* @returns {Promise<Object>} Graph API token response with access_token and expires_in
*/
async function getGraphApiToken(user, accessToken, scopes, fromCache = true) {
try {
if (!user.openidId) {
throw new Error('User must be authenticated via Entra ID to access Microsoft Graph');
}
if (!accessToken) {
throw new Error('Access token is required for token exchange');
}
if (!scopes) {
throw new Error('Graph API scopes are required for token exchange');
}
const config = getOpenIdConfig();
if (!config) {
throw new Error('OpenID configuration not available');
}
const cacheKey = `${user.openidId}:${scopes}`;
const tokensCache = getLogStores(CacheKeys.OPENID_EXCHANGED_TOKENS);
if (fromCache) {
const cachedToken = await tokensCache.get(cacheKey);
if (cachedToken) {
logger.debug(`[GraphTokenService] Using cached Graph API token for user: ${user.openidId}`);
return cachedToken;
}
}
logger.debug(`[GraphTokenService] Requesting new Graph API token for user: ${user.openidId}`);
logger.debug(`[GraphTokenService] Requested scopes: ${scopes}`);
const grantResponse = await client.genericGrantRequest(
config,
'urn:ietf:params:oauth:grant-type:jwt-bearer',
{
scope: scopes,
assertion: accessToken,
requested_token_use: 'on_behalf_of',
},
);
const tokenResponse = {
access_token: grantResponse.access_token,
token_type: 'Bearer',
expires_in: grantResponse.expires_in || 3600,
scope: scopes,
};
await tokensCache.set(
cacheKey,
tokenResponse,
(grantResponse.expires_in || 3600) * 1000, // Convert to milliseconds
);
logger.debug(
`[GraphTokenService] Successfully obtained and cached Graph API token for user: ${user.openidId}`,
);
return tokenResponse;
} catch (error) {
logger.error(
`[GraphTokenService] Failed to acquire Graph API token for user ${user.openidId}:`,
error,
);
throw new Error(`Graph token acquisition failed: ${error.message}`);
}
}
module.exports = {
getGraphApiToken,
};
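/**
 * Illustrative usage sketch (not part of the original file): exchanging the
 * signed-in user's token for a Graph token scoped to file access. The scope
 * string is only an example; `user` and `incomingAccessToken` are assumed to
 * come from the authenticated request.
 */
async function getFilesGraphTokenExample(user, incomingAccessToken) {
  const { access_token, expires_in } = await getGraphApiToken(
    user,
    incomingAccessToken,
    'https://graph.microsoft.com/Files.Read.All',
  );
  return { access_token, expires_in };
}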

View File

@@ -1,800 +0,0 @@
const mongoose = require('mongoose');
const { isEnabled } = require('@librechat/api');
const { getTransactionSupport, logger } = require('@librechat/data-schemas');
const { ResourceType, PrincipalType, PrincipalModel } = require('librechat-data-provider');
const {
entraIdPrincipalFeatureEnabled,
getUserOwnedEntraGroups,
getUserEntraGroups,
getGroupMembers,
getGroupOwners,
} = require('~/server/services/GraphApiService');
const {
findAccessibleResources: findAccessibleResourcesACL,
getEffectivePermissions: getEffectivePermissionsACL,
grantPermission: grantPermissionACL,
findEntriesByPrincipalsAndResource,
findGroupByExternalId,
findRoleByIdentifier,
getUserPrincipals,
hasPermission,
createGroup,
createUser,
updateUser,
findUser,
} = require('~/models');
const { AclEntry, AccessRole, Group } = require('~/db/models');
/** @type {boolean|null} */
let transactionSupportCache = null;
/**
* Validates that the resourceType is one of the supported enum values
* @param {string} resourceType - The resource type to validate
* @throws {Error} If resourceType is not valid
*/
const validateResourceType = (resourceType) => {
const validTypes = Object.values(ResourceType);
if (!validTypes.includes(resourceType)) {
throw new Error(`Invalid resourceType: ${resourceType}. Valid types: ${validTypes.join(', ')}`);
}
};
/**
* @import { TPrincipal } from 'librechat-data-provider'
*/
/**
* Grant a permission to a principal for a resource using a role
* @param {Object} params - Parameters for granting role-based permission
* @param {string} params.principalType - PrincipalType.USER, PrincipalType.GROUP, or PrincipalType.PUBLIC
* @param {string|mongoose.Types.ObjectId|null} params.principalId - The ID of the principal (null for PrincipalType.PUBLIC)
* @param {string} params.resourceType - Type of resource (e.g., 'agent')
* @param {string|mongoose.Types.ObjectId} params.resourceId - The ID of the resource
* @param {string} params.accessRoleId - The ID of the role (e.g., AccessRoleIds.AGENT_VIEWER, AccessRoleIds.AGENT_EDITOR)
* @param {string|mongoose.Types.ObjectId} params.grantedBy - User ID granting the permission
* @param {mongoose.ClientSession} [params.session] - Optional MongoDB session for transactions
* @returns {Promise<Object>} The created or updated ACL entry
*/
const grantPermission = async ({
principalType,
principalId,
resourceType,
resourceId,
accessRoleId,
grantedBy,
session,
}) => {
try {
if (!Object.values(PrincipalType).includes(principalType)) {
throw new Error(`Invalid principal type: ${principalType}`);
}
if (principalType !== PrincipalType.PUBLIC && !principalId) {
throw new Error('Principal ID is required for user, group, and role principals');
}
// Validate principalId based on type
if (principalId && principalType === PrincipalType.ROLE) {
// Role IDs are strings (role names)
if (typeof principalId !== 'string' || principalId.trim().length === 0) {
throw new Error(`Invalid role ID: ${principalId}`);
}
} else if (
principalType &&
principalType !== PrincipalType.PUBLIC &&
!mongoose.Types.ObjectId.isValid(principalId)
) {
// User and Group IDs must be valid ObjectIds
throw new Error(`Invalid principal ID: ${principalId}`);
}
if (!resourceId || !mongoose.Types.ObjectId.isValid(resourceId)) {
throw new Error(`Invalid resource ID: ${resourceId}`);
}
validateResourceType(resourceType);
// Get the role to determine permission bits
const role = await findRoleByIdentifier(accessRoleId);
if (!role) {
throw new Error(`Role ${accessRoleId} not found`);
}
// Ensure the role is for the correct resource type
if (role.resourceType !== resourceType) {
throw new Error(
`Role ${accessRoleId} is for ${role.resourceType} resources, not ${resourceType}`,
);
}
return await grantPermissionACL(
principalType,
principalId,
resourceType,
resourceId,
role.permBits,
grantedBy,
session,
role._id,
);
} catch (error) {
logger.error(`[PermissionService.grantPermission] Error: ${error.message}`);
throw error;
}
};
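/**
 * Illustrative usage sketch (not part of the original file): sharing an agent
 * with another user as a viewer. The 'agent' resource type and
 * AccessRoleIds.AGENT_VIEWER mirror the examples in the JSDoc above;
 * AccessRoleIds is assumed to be exported by librechat-data-provider, and the
 * id values are placeholders.
 */
const { AccessRoleIds } = require('librechat-data-provider');
const shareAgentWithUserExample = async ({ agentDbId, targetUserId, ownerUserId }) =>
  grantPermission({
    principalType: PrincipalType.USER,
    principalId: targetUserId,
    resourceType: 'agent',
    resourceId: agentDbId,
    accessRoleId: AccessRoleIds.AGENT_VIEWER,
    grantedBy: ownerUserId,
  });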
/**
* Check if a user has specific permission bits on a resource
* @param {Object} params - Parameters for checking permissions
* @param {string|mongoose.Types.ObjectId} params.userId - The ID of the user
* @param {string} [params.role] - Optional user role (if not provided, will query from DB)
* @param {string} params.resourceType - Type of resource (e.g., 'agent')
* @param {string|mongoose.Types.ObjectId} params.resourceId - The ID of the resource
* @param {number} params.requiredPermission - The permission bits required (e.g., 1 for VIEW, 3 for VIEW+EDIT)
* @returns {Promise<boolean>} Whether the user has the required permission bits
*/
const checkPermission = async ({ userId, role, resourceType, resourceId, requiredPermission }) => {
try {
if (typeof requiredPermission !== 'number' || requiredPermission < 1) {
throw new Error('requiredPermission must be a positive number');
}
validateResourceType(resourceType);
// Get all principals for the user (user + groups + public)
const principals = await getUserPrincipals({ userId, role });
if (principals.length === 0) {
return false;
}
return await hasPermission(principals, resourceType, resourceId, requiredPermission);
} catch (error) {
logger.error(`[PermissionService.checkPermission] Error: ${error.message}`);
// Re-throw validation errors
if (error.message.includes('requiredPermission must be')) {
throw error;
}
return false;
}
};
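/**
 * Illustrative usage sketch (not part of the original file): gating a route on
 * the VIEW bit. The literal 1 mirrors the "1 for VIEW" example in the JSDoc
 * above; real callers would typically use a permission-bit enum instead.
 */
const canViewAgentExample = async (userId, agentDbId) =>
  checkPermission({
    userId,
    resourceType: 'agent',
    resourceId: agentDbId,
    requiredPermission: 1, // VIEW
  });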
/**
* Get effective permission bitmask for a user on a resource
* @param {Object} params - Parameters for getting effective permissions
* @param {string|mongoose.Types.ObjectId} params.userId - The ID of the user
* @param {string} [params.role] - Optional user role (if not provided, will query from DB)
* @param {string} params.resourceType - Type of resource (e.g., 'agent')
* @param {string|mongoose.Types.ObjectId} params.resourceId - The ID of the resource
* @returns {Promise<number>} Effective permission bitmask
*/
const getEffectivePermissions = async ({ userId, role, resourceType, resourceId }) => {
try {
validateResourceType(resourceType);
// Get all principals for the user (user + groups + public)
const principals = await getUserPrincipals({ userId, role });
if (principals.length === 0) {
return 0;
}
return await getEffectivePermissionsACL(principals, resourceType, resourceId);
} catch (error) {
logger.error(`[PermissionService.getEffectivePermissions] Error: ${error.message}`);
return 0;
}
};
/**
* Find all resources of a specific type that a user has access to with specific permission bits
* @param {Object} params - Parameters for finding accessible resources
* @param {string|mongoose.Types.ObjectId} params.userId - The ID of the user
* @param {string} [params.role] - Optional user role (if not provided, will query from DB)
* @param {string} params.resourceType - Type of resource (e.g., 'agent')
* @param {number} params.requiredPermissions - The minimum permission bits required (e.g., 1 for VIEW, 3 for VIEW+EDIT)
* @returns {Promise<Array>} Array of resource IDs
*/
const findAccessibleResources = async ({ userId, role, resourceType, requiredPermissions }) => {
try {
if (typeof requiredPermissions !== 'number' || requiredPermissions < 1) {
throw new Error('requiredPermissions must be a positive number');
}
validateResourceType(resourceType);
// Get all principals for the user (user + groups + public)
const principalsList = await getUserPrincipals({ userId, role });
if (principalsList.length === 0) {
return [];
}
return await findAccessibleResourcesACL(principalsList, resourceType, requiredPermissions);
} catch (error) {
logger.error(`[PermissionService.findAccessibleResources] Error: ${error.message}`);
// Re-throw validation errors
if (error.message.includes('requiredPermissions must be')) {
throw error;
}
return [];
}
};
/**
* Find all publicly accessible resources of a specific type
* @param {Object} params - Parameters for finding publicly accessible resources
* @param {string} params.resourceType - Type of resource (e.g., 'agent')
* @param {number} params.requiredPermissions - The minimum permission bits required (e.g., 1 for VIEW, 3 for VIEW+EDIT)
* @returns {Promise<Array>} Array of resource IDs
*/
const findPubliclyAccessibleResources = async ({ resourceType, requiredPermissions }) => {
try {
if (typeof requiredPermissions !== 'number' || requiredPermissions < 1) {
throw new Error('requiredPermissions must be a positive number');
}
validateResourceType(resourceType);
// Find all public ACL entries where the public principal has at least the required permission bits
const entries = await AclEntry.find({
principalType: PrincipalType.PUBLIC,
resourceType,
permBits: { $bitsAllSet: requiredPermissions },
}).distinct('resourceId');
return entries;
} catch (error) {
logger.error(`[PermissionService.findPubliclyAccessibleResources] Error: ${error.message}`);
// Re-throw validation errors
if (error.message.includes('requiredPermissions must be')) {
throw error;
}
return [];
}
};
/**
* Get available roles for a resource type
* @param {Object} params - Parameters for getting available roles
* @param {string} params.resourceType - Type of resource (e.g., 'agent')
* @returns {Promise<Array>} Array of role definitions
*/
const getAvailableRoles = async ({ resourceType }) => {
validateResourceType(resourceType);
return await AccessRole.find({ resourceType }).lean();
};
/**
* Ensures a principal exists in the database based on TPrincipal data
* Creates user if it doesn't exist locally (for Entra ID users)
* @param {Object} principal - TPrincipal object from frontend
* @param {string} principal.type - PrincipalType.USER, PrincipalType.GROUP, or PrincipalType.PUBLIC
* @param {string} [principal.id] - Local database ID (null for Entra ID principals not yet synced)
* @param {string} principal.name - Display name
* @param {string} [principal.email] - Email address
* @param {string} [principal.source] - 'local' or 'entra'
* @param {string} [principal.idOnTheSource] - Entra ID object ID for external principals
* @returns {Promise<string|null>} Returns the principalId for database operations, null for public
*/
const ensurePrincipalExists = async function (principal) {
if (principal.type === PrincipalType.PUBLIC) {
return null;
}
if (principal.id) {
return principal.id;
}
if (principal.type === PrincipalType.USER && principal.source === 'entra') {
if (!principal.email || !principal.idOnTheSource) {
throw new Error('Entra ID user principals must have email and idOnTheSource');
}
let existingUser = await findUser({ idOnTheSource: principal.idOnTheSource });
if (!existingUser) {
existingUser = await findUser({ email: principal.email.toLowerCase() });
}
if (existingUser) {
if (!existingUser.idOnTheSource && principal.idOnTheSource) {
await updateUser(existingUser._id, {
idOnTheSource: principal.idOnTheSource,
provider: 'openid',
});
}
return existingUser._id.toString();
}
const userData = {
name: principal.name,
email: principal.email.toLowerCase(),
emailVerified: false,
provider: 'openid',
idOnTheSource: principal.idOnTheSource,
};
const userId = await createUser(userData, true, false);
return userId.toString();
}
if (principal.type === PrincipalType.GROUP) {
throw new Error('Group principals should be handled by group-specific methods');
}
throw new Error(`Unsupported principal type: ${principal.type}`);
};
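/**
 * Illustrative usage sketch (not part of the original file): resolving a user
 * or public principal picked in a sharing dialog, then granting it a role on
 * an agent. Group principals go through ensureGroupPrincipalExists instead;
 * the principal is expected to carry an accessRoleId chosen in the dialog.
 */
const grantToPickedPrincipalExample = async ({ principal, agentDbId, grantedBy }) => {
  const principalId = await ensurePrincipalExists(principal); // null for PUBLIC
  return await grantPermission({
    principalType: principal.type,
    principalId,
    resourceType: 'agent',
    resourceId: agentDbId,
    accessRoleId: principal.accessRoleId,
    grantedBy,
  });
};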
/**
* Ensures a group principal exists in the database based on TPrincipal data
* Creates group if it doesn't exist locally (for Entra ID groups)
* For Entra ID groups, always synchronizes member IDs when authentication context is provided
* @param {Object} principal - TPrincipal object from frontend
* @param {string} principal.type - Must be PrincipalType.GROUP
* @param {string} [principal.id] - Local database ID (null for Entra ID principals not yet synced)
* @param {string} principal.name - Display name
* @param {string} [principal.email] - Email address
* @param {string} [principal.description] - Group description
* @param {string} [principal.source] - 'local' or 'entra'
* @param {string} [principal.idOnTheSource] - Entra ID object ID for external principals
* @param {Object} [authContext] - Optional authentication context for fetching member data
* @param {string} [authContext.accessToken] - Access token for Graph API calls
* @param {string} [authContext.sub] - Subject identifier
* @returns {Promise<string>} Returns the groupId for database operations
*/
const ensureGroupPrincipalExists = async function (principal, authContext = null) {
if (principal.type !== PrincipalType.GROUP) {
throw new Error(`Invalid principal type: ${principal.type}. Expected '${PrincipalType.GROUP}'`);
}
if (principal.source === 'entra') {
if (!principal.name || !principal.idOnTheSource) {
throw new Error('Entra ID group principals must have name and idOnTheSource');
}
let memberIds = [];
if (authContext && authContext.accessToken && authContext.sub) {
try {
memberIds = await getGroupMembers(
authContext.accessToken,
authContext.sub,
principal.idOnTheSource,
);
// Include group owners as members if feature is enabled
if (isEnabled(process.env.ENTRA_ID_INCLUDE_OWNERS_AS_MEMBERS)) {
const ownerIds = await getGroupOwners(
authContext.accessToken,
authContext.sub,
principal.idOnTheSource,
);
if (ownerIds && ownerIds.length > 0) {
memberIds.push(...ownerIds);
// Remove duplicates
memberIds = [...new Set(memberIds)];
}
}
} catch (error) {
logger.error('Failed to fetch group members from Graph API:', error);
}
}
let existingGroup = await findGroupByExternalId(principal.idOnTheSource, 'entra');
if (!existingGroup && principal.email) {
existingGroup = await Group.findOne({ email: principal.email.toLowerCase() }).lean();
}
if (existingGroup) {
const updateData = {};
let needsUpdate = false;
if (!existingGroup.idOnTheSource && principal.idOnTheSource) {
updateData.idOnTheSource = principal.idOnTheSource;
updateData.source = 'entra';
needsUpdate = true;
}
if (principal.description && existingGroup.description !== principal.description) {
updateData.description = principal.description;
needsUpdate = true;
}
if (principal.email && existingGroup.email !== principal.email.toLowerCase()) {
updateData.email = principal.email.toLowerCase();
needsUpdate = true;
}
if (authContext && authContext.accessToken && authContext.sub) {
updateData.memberIds = memberIds;
needsUpdate = true;
}
if (needsUpdate) {
await Group.findByIdAndUpdate(existingGroup._id, { $set: updateData }, { new: true });
}
return existingGroup._id.toString();
}
const groupData = {
name: principal.name,
source: 'entra',
idOnTheSource: principal.idOnTheSource,
memberIds: memberIds, // Store idOnTheSource values of group members (empty if no auth context)
};
if (principal.email) {
groupData.email = principal.email.toLowerCase();
}
if (principal.description) {
groupData.description = principal.description;
}
const newGroup = await createGroup(groupData);
return newGroup._id.toString();
}
if (principal.id && authContext == null) {
return principal.id;
}
throw new Error(`Unsupported group principal source: ${principal.source}`);
};
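/**
 * Illustrative usage sketch (not part of the original file): syncing an Entra
 * group picked in the sharing dialog, passing the caller's token so member IDs
 * are refreshed from the Graph API. The Express `req` shape is an assumption.
 */
const resolvePickedGroupExample = async (principal, req) => {
  const authContext = {
    accessToken: req.headers.authorization?.split(' ')[1],
    sub: req.user.openidId,
  };
  return await ensureGroupPrincipalExists(principal, authContext);
};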
/**
* Synchronize user's Entra ID group memberships on sign-in
* Gets the user's group IDs from the Graph API and updates memberships only for groups that already exist in the database
* Optionally includes groups the user owns if ENTRA_ID_INCLUDE_OWNERS_AS_MEMBERS is enabled
* @param {Object} user - User object with authentication context
* @param {string} user.openidId - User's OpenID subject identifier
* @param {string} user.idOnTheSource - User's Entra ID (oid from token claims)
* @param {string} user.provider - Authentication provider ('openid')
* @param {string} accessToken - Access token for Graph API calls
* @param {mongoose.ClientSession} [session] - Optional MongoDB session for transactions
* @returns {Promise<void>}
*/
const syncUserEntraGroupMemberships = async (user, accessToken, session = null) => {
try {
if (!entraIdPrincipalFeatureEnabled(user) || !accessToken || !user.idOnTheSource) {
return;
}
const memberGroupIds = await getUserEntraGroups(accessToken, user.openidId);
let allGroupIds = [...(memberGroupIds || [])];
// Include owned groups if feature is enabled
if (isEnabled(process.env.ENTRA_ID_INCLUDE_OWNERS_AS_MEMBERS)) {
const ownedGroupIds = await getUserOwnedEntraGroups(accessToken, user.openidId);
if (ownedGroupIds && ownedGroupIds.length > 0) {
allGroupIds.push(...ownedGroupIds);
// Remove duplicates
allGroupIds = [...new Set(allGroupIds)];
}
}
if (!allGroupIds || allGroupIds.length === 0) {
return;
}
const sessionOptions = session ? { session } : {};
await Group.updateMany(
{
idOnTheSource: { $in: allGroupIds },
source: 'entra',
memberIds: { $ne: user.idOnTheSource },
},
{ $addToSet: { memberIds: user.idOnTheSource } },
sessionOptions,
);
await Group.updateMany(
{
source: 'entra',
memberIds: user.idOnTheSource,
idOnTheSource: { $nin: allGroupIds },
},
{ $pull: { memberIds: user.idOnTheSource } },
sessionOptions,
);
} catch (error) {
logger.error(`[PermissionService.syncUserEntraGroupMemberships] Error syncing groups:`, error);
}
};
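/**
 * Illustrative usage sketch (not part of the original file): refreshing Entra
 * group memberships right after an OpenID sign-in. Where this hook is wired
 * into the login flow, and the `tokenset` shape, are assumptions here.
 */
const onOpenIdLoginExample = async (user, tokenset) => {
  await syncUserEntraGroupMemberships(user, tokenset.access_token);
};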
/**
* Check if public has a specific permission on a resource
* @param {Object} params - Parameters for checking public permission
* @param {string} params.resourceType - Type of resource (e.g., 'agent')
* @param {string|mongoose.Types.ObjectId} params.resourceId - The ID of the resource
* @param {number} params.requiredPermissions - The permission bits required (e.g., 1 for VIEW, 3 for VIEW+EDIT)
* @returns {Promise<boolean>} Whether public has the required permission bits
*/
const hasPublicPermission = async ({ resourceType, resourceId, requiredPermissions }) => {
try {
if (typeof requiredPermissions !== 'number' || requiredPermissions < 1) {
throw new Error('requiredPermissions must be a positive number');
}
validateResourceType(resourceType);
// Use public principal to check permissions
const publicPrincipal = [{ principalType: PrincipalType.PUBLIC }];
const entries = await findEntriesByPrincipalsAndResource(
publicPrincipal,
resourceType,
resourceId,
);
// Check if any entry has the required permission bits
return entries.some((entry) => (entry.permBits & requiredPermissions) === requiredPermissions);
} catch (error) {
logger.error(`[PermissionService.hasPublicPermission] Error: ${error.message}`);
// Re-throw validation errors
if (error.message.includes('requiredPermissions must be')) {
throw error;
}
return false;
}
};
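/**
 * Worked example of the bitwise check above (illustrative values): with
 * permBits = 0b011 (VIEW + EDIT) and requiredPermissions = 0b001 (VIEW),
 * (0b011 & 0b001) === 0b001, so the entry qualifies; requesting 0b100
 * (a higher bit, e.g. a delete-style permission) would fail because
 * (0b011 & 0b100) !== 0b100.
 */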
/**
* Bulk update permissions for a resource (grant, update, revoke)
* Efficiently handles multiple permission changes in a single transaction
*
* @param {Object} params - Parameters for bulk permission update
* @param {string} params.resourceType - Type of resource (e.g., 'agent')
* @param {string|mongoose.Types.ObjectId} params.resourceId - The ID of the resource
* @param {Array<TPrincipal>} params.updatedPrincipals - Array of principals to grant/update permissions for
* @param {Array<TPrincipal>} params.revokedPrincipals - Array of principals to revoke permissions from
* @param {string|mongoose.Types.ObjectId} params.grantedBy - User ID making the changes
* @param {mongoose.ClientSession} [params.session] - Optional MongoDB session for transactions
* @returns {Promise<Object>} Results object with granted, updated, revoked arrays and error details
*/
const bulkUpdateResourcePermissions = async ({
resourceType,
resourceId,
updatedPrincipals = [],
revokedPrincipals = [],
grantedBy,
session,
}) => {
const supportsTransactions = await getTransactionSupport(mongoose, transactionSupportCache);
transactionSupportCache = supportsTransactions;
let localSession = session;
let shouldEndSession = false;
try {
if (!Array.isArray(updatedPrincipals)) {
throw new Error('updatedPrincipals must be an array');
}
if (!Array.isArray(revokedPrincipals)) {
throw new Error('revokedPrincipals must be an array');
}
if (!resourceId || !mongoose.Types.ObjectId.isValid(resourceId)) {
throw new Error(`Invalid resource ID: ${resourceId}`);
}
if (!localSession && supportsTransactions) {
localSession = await mongoose.startSession();
localSession.startTransaction();
shouldEndSession = true;
}
const sessionOptions = localSession ? { session: localSession } : {};
const roles = await AccessRole.find({ resourceType }).lean();
const rolesMap = new Map();
roles.forEach((role) => {
rolesMap.set(role.accessRoleId, role);
});
const results = {
granted: [],
updated: [],
revoked: [],
errors: [],
};
const bulkWrites = [];
for (const principal of updatedPrincipals) {
try {
if (!principal.accessRoleId) {
results.errors.push({
principal,
error: 'accessRoleId is required for updated principals',
});
continue;
}
const role = rolesMap.get(principal.accessRoleId);
if (!role) {
results.errors.push({
principal,
error: `Role ${principal.accessRoleId} not found`,
});
continue;
}
const query = {
principalType: principal.type,
resourceType,
resourceId,
};
if (principal.type !== PrincipalType.PUBLIC) {
query.principalId =
principal.type === PrincipalType.ROLE
? principal.id
: new mongoose.Types.ObjectId(principal.id);
}
const principalModelMap = {
[PrincipalType.USER]: PrincipalModel.USER,
[PrincipalType.GROUP]: PrincipalModel.GROUP,
[PrincipalType.ROLE]: PrincipalModel.ROLE,
};
const update = {
$set: {
permBits: role.permBits,
roleId: role._id,
grantedBy,
grantedAt: new Date(),
},
$setOnInsert: {
principalType: principal.type,
resourceType,
resourceId,
...(principal.type !== PrincipalType.PUBLIC && {
principalId:
principal.type === PrincipalType.ROLE
? principal.id
: new mongoose.Types.ObjectId(principal.id),
principalModel: principalModelMap[principal.type],
}),
},
};
bulkWrites.push({
updateOne: {
filter: query,
update: update,
upsert: true,
},
});
results.granted.push({
type: principal.type,
id: principal.id,
name: principal.name,
email: principal.email,
source: principal.source,
avatar: principal.avatar,
description: principal.description,
idOnTheSource: principal.idOnTheSource,
accessRoleId: principal.accessRoleId,
memberCount: principal.memberCount,
memberIds: principal.memberIds,
});
} catch (error) {
results.errors.push({
principal,
error: error.message,
});
}
}
if (bulkWrites.length > 0) {
await AclEntry.bulkWrite(bulkWrites, sessionOptions);
}
const deleteQueries = [];
for (const principal of revokedPrincipals) {
try {
const query = {
principalType: principal.type,
resourceType,
resourceId,
};
if (principal.type !== PrincipalType.PUBLIC) {
query.principalId =
principal.type === PrincipalType.ROLE
? principal.id
: new mongoose.Types.ObjectId(principal.id);
}
deleteQueries.push(query);
results.revoked.push({
type: principal.type,
id: principal.id,
name: principal.name,
email: principal.email,
source: principal.source,
avatar: principal.avatar,
description: principal.description,
idOnTheSource: principal.idOnTheSource,
memberCount: principal.memberCount,
});
} catch (error) {
results.errors.push({
principal,
error: error.message,
});
}
}
if (deleteQueries.length > 0) {
await AclEntry.deleteMany(
{
$or: deleteQueries,
},
sessionOptions,
);
}
if (shouldEndSession && supportsTransactions) {
await localSession.commitTransaction();
}
return results;
} catch (error) {
if (shouldEndSession && supportsTransactions) {
await localSession.abortTransaction();
}
logger.error(`[PermissionService.bulkUpdateResourcePermissions] Error: ${error.message}`);
throw error;
} finally {
if (shouldEndSession && localSession) {
localSession.endSession();
}
}
};
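/**
 * Illustrative usage sketch (not part of the original file): applying the
 * changes from a sharing dialog in one call. Each updated principal must carry
 * an accessRoleId; the 'agent' resource type mirrors the JSDoc example.
 */
const applySharingChangesExample = async ({ agentDbId, grantedBy, updated, revoked }) => {
  const results = await bulkUpdateResourcePermissions({
    resourceType: 'agent',
    resourceId: agentDbId,
    updatedPrincipals: updated,
    revokedPrincipals: revoked,
    grantedBy,
  });
  if (results.errors.length > 0) {
    logger.error('[PermissionService] Some sharing changes failed:', results.errors);
  }
  return results;
};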
/**
* Remove all permissions for a resource (cleanup when resource is deleted)
* @param {Object} params - Parameters for removing all permissions
* @param {string} params.resourceType - Type of resource (e.g., 'agent', 'prompt')
* @param {string|mongoose.Types.ObjectId} params.resourceId - The ID of the resource
* @returns {Promise<Object>} Result of the deletion operation
*/
const removeAllPermissions = async ({ resourceType, resourceId }) => {
try {
validateResourceType(resourceType);
if (!resourceId || !mongoose.Types.ObjectId.isValid(resourceId)) {
throw new Error(`Invalid resource ID: ${resourceId}`);
}
const result = await AclEntry.deleteMany({
resourceType,
resourceId,
});
return result;
} catch (error) {
logger.error(`[PermissionService.removeAllPermissions] Error: ${error.message}`);
throw error;
}
};
module.exports = {
grantPermission,
checkPermission,
getEffectivePermissions,
findAccessibleResources,
findPubliclyAccessibleResources,
hasPublicPermission,
getAvailableRoles,
bulkUpdateResourcePermissions,
ensurePrincipalExists,
ensureGroupPrincipalExists,
syncUserEntraGroupMemberships,
removeAllPermissions,
};

File diff suppressed because it is too large

View File

@@ -1,7 +1,6 @@
const fs = require('fs');
const path = require('path');
const { sleep } = require('@librechat/agents');
const { getToolkitKey } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { zodToJsonSchema } = require('zod-to-json-schema');
const { Calculator } = require('@langchain/community/tools/calculator');
@@ -12,6 +11,7 @@ const {
ErrorTypes,
ContentTypes,
imageGenTools,
EToolResources,
EModelEndpoint,
actionDelimiter,
ImageVisionTool,
@@ -40,6 +40,30 @@ const { recordUsage } = require('~/server/services/Threads');
const { loadTools } = require('~/app/clients/tools/util');
const { redactMessage } = require('~/config/parsers');
/**
* @param {string} toolName
* @returns {string | undefined} toolKey
*/
function getToolkitKey(toolName) {
/** @type {string|undefined} */
let toolkitKey;
for (const toolkit of toolkits) {
if (toolName.startsWith(EToolResources.image_edit)) {
const splitMatches = toolkit.pluginKey.split('_');
const suffix = splitMatches[splitMatches.length - 1];
if (toolName.endsWith(suffix)) {
toolkitKey = toolkit.pluginKey;
break;
}
}
if (toolName.startsWith(toolkit.pluginKey)) {
toolkitKey = toolkit.pluginKey;
break;
}
}
return toolkitKey;
}
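/**
 * Worked example (hypothetical names): if `toolkits` contains an entry with
 * pluginKey 'my_toolkit', then getToolkitKey('my_toolkit_search') returns
 * 'my_toolkit' via the startsWith branch; a tool whose name starts with the
 * image_edit resource is instead matched against the trailing segment of a
 * toolkit's pluginKey (the part after its last underscore).
 */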
/**
* Loads and formats tools from the specified tool directory.
*
@@ -121,7 +145,7 @@ function loadAndFormatTools({ directory, adminFilter = [], adminIncluded = [] })
for (const toolInstance of basicToolInstances) {
const formattedTool = formatToOpenAIAssistantTool(toolInstance);
let toolName = formattedTool[Tools.function].name;
toolName = getToolkitKey({ toolkits, toolName }) ?? toolName;
toolName = getToolkitKey(toolName) ?? toolName;
if (filter.has(toolName) && included.size === 0) {
continue;
}
@@ -522,7 +546,6 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
if (includesWebSearch) {
webSearchCallbacks = createOnSearchResults(res);
}
const { loadedTools, toolContextMap } = await loadTools({
agent,
functions: true,

View File

@@ -2,90 +2,20 @@ const {
SystemRoles,
Permissions,
PermissionTypes,
isMemoryEnabled,
removeNullishValues,
} = require('librechat-data-provider');
const { logger } = require('@librechat/data-schemas');
const { isMemoryEnabled } = require('@librechat/api');
const { updateAccessPermissions, getRoleByName } = require('~/models/Role');
/**
* Updates role permissions intelligently - only updates permission types that:
* 1. Don't exist in the database (first time setup)
* 2. Are explicitly configured in the config file
* @param {object} params - Options for updating role permissions
* @param {string} params.roleName - The role name to update
* @param {object} params.allPermissions - All permissions to potentially update
* @param {object} params.interfaceConfig - The interface config from librechat.yaml
*/
async function updateRolePermissions({ roleName, allPermissions, interfaceConfig }) {
const existingRole = await getRoleByName(roleName);
const existingPermissions = existingRole?.permissions || {};
const permissionsToUpdate = {};
for (const [permType, perms] of Object.entries(allPermissions)) {
const permTypeExists = existingPermissions[permType];
const isExplicitlyConfigured = interfaceConfig && hasExplicitConfig(interfaceConfig, permType);
// Only update if: doesn't exist OR explicitly configured
if (!permTypeExists || isExplicitlyConfigured) {
permissionsToUpdate[permType] = perms;
if (!permTypeExists) {
logger.debug(`Role '${roleName}': Setting up default permissions for '${permType}'`);
} else if (isExplicitlyConfigured) {
logger.debug(`Role '${roleName}': Applying explicit config for '${permType}'`);
}
} else {
logger.debug(`Role '${roleName}': Preserving existing permissions for '${permType}'`);
}
}
if (Object.keys(permissionsToUpdate).length > 0) {
await updateAccessPermissions(roleName, permissionsToUpdate, existingRole);
}
}
/**
* Checks if a permission type has explicit configuration
*/
function hasExplicitConfig(interfaceConfig, permissionType) {
switch (permissionType) {
case PermissionTypes.PROMPTS:
return interfaceConfig.prompts !== undefined;
case PermissionTypes.BOOKMARKS:
return interfaceConfig.bookmarks !== undefined;
case PermissionTypes.MEMORIES:
return interfaceConfig.memories !== undefined;
case PermissionTypes.MULTI_CONVO:
return interfaceConfig.multiConvo !== undefined;
case PermissionTypes.AGENTS:
return interfaceConfig.agents !== undefined;
case PermissionTypes.TEMPORARY_CHAT:
return interfaceConfig.temporaryChat !== undefined;
case PermissionTypes.RUN_CODE:
return interfaceConfig.runCode !== undefined;
case PermissionTypes.WEB_SEARCH:
return interfaceConfig.webSearch !== undefined;
case PermissionTypes.PEOPLE_PICKER:
return interfaceConfig.peoplePicker !== undefined;
case PermissionTypes.MARKETPLACE:
return interfaceConfig.marketplace !== undefined;
case PermissionTypes.FILE_SEARCH:
return interfaceConfig.fileSearch !== undefined;
case PermissionTypes.FILE_CITATIONS:
return interfaceConfig.fileCitations !== undefined;
default:
return false;
}
}
const { updateAccessPermissions } = require('~/models/Role');
const { logger } = require('~/config');
/**
* Loads the default interface object.
* @param {TCustomConfig | undefined} config - The loaded custom configuration.
* @param {TConfigDefaults} configDefaults - The custom configuration default values.
* @param {SystemRoles} [roleName] - The role to load the default interface for, defaults to `'USER'`.
* @returns {Promise<TCustomConfig['interface']>} The default interface object.
*/
async function loadDefaultInterface(config, configDefaults) {
async function loadDefaultInterface(config, configDefaults, roleName = SystemRoles.USER) {
const { interface: interfaceConfig } = config ?? {};
const { interface: defaults } = configDefaults;
const hasModelSpecs = config?.modelSpecs?.list?.length > 0;
@@ -121,47 +51,37 @@ async function loadDefaultInterface(config, configDefaults) {
runCode: interfaceConfig?.runCode ?? defaults.runCode,
webSearch: interfaceConfig?.webSearch ?? defaults.webSearch,
fileSearch: interfaceConfig?.fileSearch ?? defaults.fileSearch,
fileCitations: interfaceConfig?.fileCitations ?? defaults.fileCitations,
customWelcome: interfaceConfig?.customWelcome ?? defaults.customWelcome,
peoplePicker: {
users: interfaceConfig?.peoplePicker?.users ?? defaults.peoplePicker?.users,
groups: interfaceConfig?.peoplePicker?.groups ?? defaults.peoplePicker?.groups,
roles: interfaceConfig?.peoplePicker?.roles ?? defaults.peoplePicker?.roles,
},
marketplace: {
use: interfaceConfig?.marketplace?.use ?? defaults.marketplace?.use,
},
});
for (const roleName of [SystemRoles.USER, SystemRoles.ADMIN]) {
await updateRolePermissions({
roleName,
allPermissions: {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: loadedInterface.prompts },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: loadedInterface.bookmarks },
[PermissionTypes.MEMORIES]: {
[Permissions.USE]: loadedInterface.memories,
[Permissions.OPT_OUT]: isPersonalizationEnabled,
},
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: loadedInterface.multiConvo },
[PermissionTypes.AGENTS]: { [Permissions.USE]: loadedInterface.agents },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: loadedInterface.temporaryChat },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: loadedInterface.runCode },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: loadedInterface.webSearch },
[PermissionTypes.PEOPLE_PICKER]: {
[Permissions.VIEW_USERS]: loadedInterface.peoplePicker?.users,
[Permissions.VIEW_GROUPS]: loadedInterface.peoplePicker?.groups,
[Permissions.VIEW_ROLES]: loadedInterface.peoplePicker?.roles,
},
[PermissionTypes.MARKETPLACE]: {
[Permissions.USE]: loadedInterface.marketplace?.use,
},
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: loadedInterface.fileSearch },
[PermissionTypes.FILE_CITATIONS]: { [Permissions.USE]: loadedInterface.fileCitations },
},
interfaceConfig,
});
}
await updateAccessPermissions(roleName, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: loadedInterface.prompts },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: loadedInterface.bookmarks },
[PermissionTypes.MEMORIES]: {
[Permissions.USE]: loadedInterface.memories,
[Permissions.OPT_OUT]: isPersonalizationEnabled,
},
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: loadedInterface.multiConvo },
[PermissionTypes.AGENTS]: { [Permissions.USE]: loadedInterface.agents },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: loadedInterface.temporaryChat },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: loadedInterface.runCode },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: loadedInterface.webSearch },
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: loadedInterface.fileSearch },
});
await updateAccessPermissions(SystemRoles.ADMIN, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: loadedInterface.prompts },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: loadedInterface.bookmarks },
[PermissionTypes.MEMORIES]: {
[Permissions.USE]: loadedInterface.memories,
[Permissions.OPT_OUT]: isPersonalizationEnabled,
},
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: loadedInterface.multiConvo },
[PermissionTypes.AGENTS]: { [Permissions.USE]: loadedInterface.agents },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: loadedInterface.temporaryChat },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: loadedInterface.runCode },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: loadedInterface.webSearch },
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: loadedInterface.fileSearch },
});
let i = 0;
const logSettings = () => {
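
For orientation, a minimal sketch of the pattern in the hunk above: both sides build one permissions map from the loaded interface flags and apply it to the USER and ADMIN system roles, the newer side through a single loop calling updateRolePermissions, the older side through two explicit updateAccessPermissions calls. The helper below is illustrative, only covers a few of the flags, and is not the module's actual code.

const { SystemRoles, Permissions, PermissionTypes } = require('librechat-data-provider');

// Illustrative only: collapse a few loaded interface flags into one permissions map.
function buildPermissionsMap(loadedInterface) {
  return {
    [PermissionTypes.PROMPTS]: { [Permissions.USE]: loadedInterface.prompts },
    [PermissionTypes.BOOKMARKS]: { [Permissions.USE]: loadedInterface.bookmarks },
    [PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: loadedInterface.webSearch },
  };
}

// Illustrative only: apply the same map to both system roles, as the loop above does.
async function applyToSystemRoles(loadedInterface, updateRolePermissions, interfaceConfig) {
  const allPermissions = buildPermissionsMap(loadedInterface);
  for (const roleName of [SystemRoles.USER, SystemRoles.ADMIN]) {
    await updateRolePermissions({ roleName, allPermissions, interfaceConfig });
  }
}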

View File

@@ -1,19 +1,12 @@
const { SystemRoles, Permissions, PermissionTypes } = require('librechat-data-provider');
const { updateAccessPermissions, getRoleByName } = require('~/models/Role');
const { updateAccessPermissions } = require('~/models/Role');
const { loadDefaultInterface } = require('./interface');
jest.mock('~/models/Role', () => ({
updateAccessPermissions: jest.fn(),
getRoleByName: jest.fn(),
}));
describe('loadDefaultInterface', () => {
beforeEach(() => {
jest.clearAllMocks();
// Mock getRoleByName to return null (no existing permissions)
getRoleByName.mockResolvedValue(null);
});
it('should call updateAccessPermissions with the correct parameters when permission types are true', async () => {
const config = {
interface: {
@@ -26,58 +19,23 @@ describe('loadDefaultInterface', () => {
runCode: true,
webSearch: true,
fileSearch: true,
fileCitations: true,
peoplePicker: {
users: true,
groups: true,
roles: true,
},
marketplace: {
use: true,
},
},
};
const configDefaults = { interface: {} };
await loadDefaultInterface(config, configDefaults);
const expectedPermissions = {
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: true },
[PermissionTypes.MEMORIES]: {
[Permissions.USE]: true,
[Permissions.OPT_OUT]: undefined,
},
[PermissionTypes.MEMORIES]: { [Permissions.USE]: true, [Permissions.OPT_OUT]: undefined },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
[PermissionTypes.AGENTS]: { [Permissions.USE]: true },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: true },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: true },
[PermissionTypes.MARKETPLACE]: { [Permissions.USE]: true },
[PermissionTypes.PEOPLE_PICKER]: {
[Permissions.VIEW_USERS]: true,
[Permissions.VIEW_GROUPS]: true,
[Permissions.VIEW_ROLES]: true,
},
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: true },
[PermissionTypes.FILE_CITATIONS]: { [Permissions.USE]: true },
};
expect(updateAccessPermissions).toHaveBeenCalledTimes(2);
// Check USER role call
expect(updateAccessPermissions).toHaveBeenCalledWith(
SystemRoles.USER,
expectedPermissions,
null,
);
// Check ADMIN role call
expect(updateAccessPermissions).toHaveBeenCalledWith(
SystemRoles.ADMIN,
expectedPermissions,
null,
);
});
});
it('should call updateAccessPermissions with false when permission types are false', async () => {
@@ -92,22 +50,13 @@ describe('loadDefaultInterface', () => {
runCode: false,
webSearch: false,
fileSearch: false,
fileCitations: false,
peoplePicker: {
users: false,
groups: false,
roles: false,
},
marketplace: {
use: false,
},
},
};
const configDefaults = { interface: {} };
await loadDefaultInterface(config, configDefaults);
const expectedPermissions = {
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: false },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
[PermissionTypes.MEMORIES]: { [Permissions.USE]: false, [Permissions.OPT_OUT]: undefined },
@@ -116,31 +65,8 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: false },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: false },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: false },
[PermissionTypes.MARKETPLACE]: { [Permissions.USE]: false },
[PermissionTypes.PEOPLE_PICKER]: {
[Permissions.VIEW_USERS]: false,
[Permissions.VIEW_GROUPS]: false,
[Permissions.VIEW_ROLES]: false,
},
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: false },
[PermissionTypes.FILE_CITATIONS]: { [Permissions.USE]: false },
};
expect(updateAccessPermissions).toHaveBeenCalledTimes(2);
// Check USER role call
expect(updateAccessPermissions).toHaveBeenCalledWith(
SystemRoles.USER,
expectedPermissions,
null,
);
// Check ADMIN role call
expect(updateAccessPermissions).toHaveBeenCalledWith(
SystemRoles.ADMIN,
expectedPermissions,
null,
);
});
});
it('should call updateAccessPermissions with undefined when permission types are not specified in config', async () => {
@@ -149,7 +75,7 @@ describe('loadDefaultInterface', () => {
await loadDefaultInterface(config, configDefaults);
const expectedPermissions = {
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: undefined },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
[PermissionTypes.MEMORIES]: {
@@ -161,31 +87,42 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
[PermissionTypes.MARKETPLACE]: { [Permissions.USE]: undefined },
[PermissionTypes.PEOPLE_PICKER]: {
[Permissions.VIEW_USERS]: undefined,
[Permissions.VIEW_GROUPS]: undefined,
[Permissions.VIEW_ROLES]: undefined,
},
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: undefined },
[PermissionTypes.FILE_CITATIONS]: { [Permissions.USE]: undefined },
});
});
it('should call updateAccessPermissions with undefined when permission types are explicitly undefined', async () => {
const config = {
interface: {
prompts: undefined,
bookmarks: undefined,
memories: undefined,
multiConvo: undefined,
agents: undefined,
temporaryChat: undefined,
runCode: undefined,
webSearch: undefined,
fileSearch: undefined,
},
};
const configDefaults = { interface: {} };
expect(updateAccessPermissions).toHaveBeenCalledTimes(2);
await loadDefaultInterface(config, configDefaults);
// Check USER role call
expect(updateAccessPermissions).toHaveBeenCalledWith(
SystemRoles.USER,
expectedPermissions,
null,
);
// Check ADMIN role call
expect(updateAccessPermissions).toHaveBeenCalledWith(
SystemRoles.ADMIN,
expectedPermissions,
null,
);
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: undefined },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
[PermissionTypes.MEMORIES]: {
[Permissions.USE]: undefined,
[Permissions.OPT_OUT]: undefined,
},
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: undefined },
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: undefined },
});
});
it('should call updateAccessPermissions with mixed values for permission types', async () => {
@@ -200,14 +137,13 @@ describe('loadDefaultInterface', () => {
runCode: false,
webSearch: true,
fileSearch: false,
fileCitations: true,
},
};
const configDefaults = { interface: {} };
await loadDefaultInterface(config, configDefaults);
const expectedPermissions = {
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
[PermissionTypes.MEMORIES]: { [Permissions.USE]: true, [Permissions.OPT_OUT]: undefined },
@@ -216,34 +152,11 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: false },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: true },
[PermissionTypes.MARKETPLACE]: { [Permissions.USE]: undefined },
[PermissionTypes.PEOPLE_PICKER]: {
[Permissions.VIEW_USERS]: undefined,
[Permissions.VIEW_GROUPS]: undefined,
[Permissions.VIEW_ROLES]: undefined,
},
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: false },
[PermissionTypes.FILE_CITATIONS]: { [Permissions.USE]: true },
};
expect(updateAccessPermissions).toHaveBeenCalledTimes(2);
// Check USER role call
expect(updateAccessPermissions).toHaveBeenCalledWith(
SystemRoles.USER,
expectedPermissions,
null,
);
// Check ADMIN role call
expect(updateAccessPermissions).toHaveBeenCalledWith(
SystemRoles.ADMIN,
expectedPermissions,
null,
);
});
});
it('should use default values when config is undefined', async () => {
it('should call updateAccessPermissions with true when config is undefined', async () => {
const config = undefined;
const configDefaults = {
interface: {
@@ -256,21 +169,12 @@ describe('loadDefaultInterface', () => {
runCode: true,
webSearch: true,
fileSearch: true,
fileCitations: true,
peoplePicker: {
users: true,
groups: true,
roles: true,
},
marketplace: {
use: false,
},
},
};
await loadDefaultInterface(config, configDefaults);
const expectedPermissions = {
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: true },
[PermissionTypes.MEMORIES]: { [Permissions.USE]: true, [Permissions.OPT_OUT]: undefined },
@@ -279,164 +183,240 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: true },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: true },
[PermissionTypes.MARKETPLACE]: { [Permissions.USE]: false },
[PermissionTypes.PEOPLE_PICKER]: {
[Permissions.VIEW_USERS]: true,
[Permissions.VIEW_GROUPS]: true,
[Permissions.VIEW_ROLES]: true,
},
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: true },
[PermissionTypes.FILE_CITATIONS]: { [Permissions.USE]: true },
};
expect(updateAccessPermissions).toHaveBeenCalledTimes(2);
// Check USER role call
expect(updateAccessPermissions).toHaveBeenCalledWith(
SystemRoles.USER,
expectedPermissions,
null,
);
// Check ADMIN role call
expect(updateAccessPermissions).toHaveBeenCalledWith(
SystemRoles.ADMIN,
expectedPermissions,
null,
);
});
});
it('should only update permissions that do not exist when no config provided', async () => {
// Mock that some permissions already exist
getRoleByName.mockResolvedValue({
permissions: {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: false },
[PermissionTypes.AGENTS]: { [Permissions.USE]: true },
},
});
const config = undefined;
const configDefaults = {
interface: {
prompts: true,
bookmarks: true,
memories: true,
multiConvo: true,
agents: true,
temporaryChat: true,
runCode: true,
webSearch: true,
fileSearch: true,
fileCitations: true,
},
};
it('should call updateAccessPermissions with the correct parameters when multiConvo is true', async () => {
const config = { interface: { multiConvo: true } };
const configDefaults = { interface: {} };
await loadDefaultInterface(config, configDefaults);
// Should be called with all permissions EXCEPT prompts and agents (which already exist)
const expectedPermissions = {
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: true },
[PermissionTypes.MEMORIES]: { [Permissions.USE]: true, [Permissions.OPT_OUT]: undefined },
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: undefined },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
[PermissionTypes.MEMORIES]: {
[Permissions.USE]: undefined,
[Permissions.OPT_OUT]: undefined,
},
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: true },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: true },
[PermissionTypes.MARKETPLACE]: { [Permissions.USE]: undefined },
[PermissionTypes.PEOPLE_PICKER]: {
[Permissions.VIEW_USERS]: undefined,
[Permissions.VIEW_GROUPS]: undefined,
[Permissions.VIEW_ROLES]: undefined,
},
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: true },
[PermissionTypes.FILE_CITATIONS]: { [Permissions.USE]: true },
};
expect(updateAccessPermissions).toHaveBeenCalledTimes(2);
expect(updateAccessPermissions).toHaveBeenCalledWith(
SystemRoles.USER,
expectedPermissions,
expect.objectContaining({
permissions: {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: false },
[PermissionTypes.AGENTS]: { [Permissions.USE]: true },
},
}),
);
expect(updateAccessPermissions).toHaveBeenCalledWith(
SystemRoles.ADMIN,
expectedPermissions,
expect.objectContaining({
permissions: {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: false },
[PermissionTypes.AGENTS]: { [Permissions.USE]: true },
},
}),
);
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: undefined },
});
});
it('should override existing permissions when explicitly configured', async () => {
// Mock that some permissions already exist
getRoleByName.mockResolvedValue({
permissions: {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: false },
[PermissionTypes.AGENTS]: { [Permissions.USE]: false },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
},
});
const config = {
interface: {
prompts: true, // Explicitly set, should override existing false
// agents not specified, so existing false should be preserved
// bookmarks not specified, so existing false should be preserved
},
};
const configDefaults = {
interface: {
prompts: false,
agents: true,
bookmarks: true,
},
};
it('should call updateAccessPermissions with false when multiConvo is false', async () => {
const config = { interface: { multiConvo: false } };
const configDefaults = { interface: {} };
await loadDefaultInterface(config, configDefaults);
// Should update prompts (explicitly configured) and all other permissions that don't exist
const expectedPermissions = {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true }, // Explicitly configured
// All other permissions that don't exist in the database
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: undefined },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
[PermissionTypes.MEMORIES]: {
[Permissions.USE]: undefined,
[Permissions.OPT_OUT]: undefined,
},
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: false },
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: undefined },
});
});
it('should call updateAccessPermissions with undefined when multiConvo is not specified in config', async () => {
const config = {};
const configDefaults = { interface: {} };
await loadDefaultInterface(config, configDefaults);
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: undefined },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
[PermissionTypes.MEMORIES]: {
[Permissions.USE]: undefined,
[Permissions.OPT_OUT]: undefined,
},
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: undefined },
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
[PermissionTypes.MARKETPLACE]: { [Permissions.USE]: undefined },
[PermissionTypes.PEOPLE_PICKER]: {
[Permissions.VIEW_USERS]: undefined,
[Permissions.VIEW_GROUPS]: undefined,
[Permissions.VIEW_ROLES]: undefined,
},
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: undefined },
[PermissionTypes.FILE_CITATIONS]: { [Permissions.USE]: undefined },
});
});
it('should call updateAccessPermissions with all interface options including multiConvo', async () => {
const config = {
interface: {
prompts: true,
bookmarks: false,
memories: true,
multiConvo: true,
agents: false,
temporaryChat: true,
runCode: false,
fileSearch: true,
},
};
const configDefaults = { interface: {} };
await loadDefaultInterface(config, configDefaults);
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
[PermissionTypes.MEMORIES]: { [Permissions.USE]: true, [Permissions.OPT_OUT]: undefined },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
[PermissionTypes.AGENTS]: { [Permissions.USE]: false },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: false },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: true },
});
});
it('should use default values for multiConvo when config is undefined', async () => {
const config = undefined;
const configDefaults = {
interface: {
prompts: true,
bookmarks: true,
memories: false,
multiConvo: false,
agents: undefined,
temporaryChat: undefined,
runCode: undefined,
webSearch: undefined,
fileSearch: true,
},
};
expect(updateAccessPermissions).toHaveBeenCalledTimes(2);
expect(updateAccessPermissions).toHaveBeenCalledWith(
SystemRoles.USER,
expectedPermissions,
expect.objectContaining({
permissions: expect.any(Object),
}),
);
expect(updateAccessPermissions).toHaveBeenCalledWith(
SystemRoles.ADMIN,
expectedPermissions,
expect.objectContaining({
permissions: expect.any(Object),
}),
);
await loadDefaultInterface(config, configDefaults);
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: true },
[PermissionTypes.MEMORIES]: { [Permissions.USE]: false, [Permissions.OPT_OUT]: undefined },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: false },
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: true },
});
});
it('should call updateAccessPermissions with the correct parameters when WEB_SEARCH is undefined', async () => {
const config = {
interface: {
prompts: true,
bookmarks: false,
memories: true,
multiConvo: true,
agents: false,
temporaryChat: true,
runCode: false,
},
};
const configDefaults = { interface: {} };
await loadDefaultInterface(config, configDefaults);
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
[PermissionTypes.MEMORIES]: { [Permissions.USE]: true, [Permissions.OPT_OUT]: undefined },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
[PermissionTypes.AGENTS]: { [Permissions.USE]: false },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: false },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: undefined },
});
});
it('should call updateAccessPermissions with the correct parameters when FILE_SEARCH is true', async () => {
const config = {
interface: {
fileSearch: true,
},
};
const configDefaults = { interface: {} };
await loadDefaultInterface(config, configDefaults);
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: undefined },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
[PermissionTypes.MEMORIES]: { [Permissions.USE]: undefined },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: undefined },
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: true },
});
});
it('should call updateAccessPermissions with false when FILE_SEARCH is false', async () => {
const config = {
interface: {
fileSearch: false,
},
};
const configDefaults = { interface: {} };
await loadDefaultInterface(config, configDefaults);
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: undefined },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
[PermissionTypes.MEMORIES]: { [Permissions.USE]: undefined },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: undefined },
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: undefined },
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: false },
});
});
it('should call updateAccessPermissions with all interface options including fileSearch', async () => {
const config = {
interface: {
prompts: true,
bookmarks: false,
memories: true,
multiConvo: true,
agents: false,
temporaryChat: true,
runCode: false,
webSearch: true,
fileSearch: true,
},
};
const configDefaults = { interface: {} };
await loadDefaultInterface(config, configDefaults);
expect(updateAccessPermissions).toHaveBeenCalledWith(SystemRoles.USER, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
[PermissionTypes.MEMORIES]: { [Permissions.USE]: true, [Permissions.OPT_OUT]: undefined },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
[PermissionTypes.AGENTS]: { [Permissions.USE]: false },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: false },
[PermissionTypes.WEB_SEARCH]: { [Permissions.USE]: true },
[PermissionTypes.FILE_SEARCH]: { [Permissions.USE]: true },
});
});
});
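
The "should only update permissions that do not exist" case above asserts that, when no interface config is provided, permission types already present on the role (here PROMPTS and AGENTS) are left alone while everything else receives its default. A rough sketch of that filtering behavior, assuming the role returned by getRoleByName carries a permissions object; the helper name and the explicitlySet argument are illustrative, not the real implementation.

// Illustrative only: keep a permission type when the role does not have it yet,
// or when the admin explicitly set it in the interface config.
function filterPermissionsToUpdate(allPermissions, existingRole, explicitlySet = new Set()) {
  const result = {};
  for (const [permType, value] of Object.entries(allPermissions)) {
    const alreadyExists = existingRole?.permissions?.[permType] !== undefined;
    if (!alreadyExists || explicitlySet.has(permType)) {
      result[permType] = value;
    }
  }
  return result;
}

// With an existing role that already has PROMPTS and AGENTS set and nothing explicitly
// configured, the returned map omits those two types, matching the expectation above.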

View File

@@ -44,24 +44,4 @@ const getBufferMetadata = async (buffer) => {
};
};
/**
* Removes UUID prefix from filename for clean display
* Pattern: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx__filename.ext
* @param {string} fileName - The filename to clean
* @returns {string} - The cleaned filename without UUID prefix
*/
const cleanFileName = (fileName) => {
if (!fileName) {
return fileName;
}
// Remove UUID pattern: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx__
const cleaned = fileName.replace(
/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}__/i,
'',
);
return cleaned;
};
module.exports = { determineFileType, getBufferMetadata, cleanFileName };
module.exports = { determineFileType, getBufferMetadata };
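
A quick usage sketch for the cleanFileName helper removed above: the regex only strips a leading UUID-plus-double-underscore prefix, and falsy input is returned unchanged. The sample file names below are made up.

// Illustrative usage of the removed cleanFileName helper.
cleanFileName('6f1d2c3a-4b5e-6789-abcd-0123456789ab__report.pdf'); // => 'report.pdf'
cleanFileName('notes.txt'); // => 'notes.txt' (no UUID prefix, left unchanged)
cleanFileName(undefined); // => undefined (falsy input is returned as-is)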

View File

@@ -1,61 +0,0 @@
const { FileContext } = require('librechat-data-provider');
/**
* Determines the appropriate file storage strategy based on file type and configuration.
*
* @param {Object} config - App configuration object containing fileStrategy and fileStrategies
* @param {Object} options - File context options
* @param {boolean} options.isAvatar - Whether this is an avatar upload
* @param {boolean} options.isImage - Whether this is an image upload
* @param {string} options.context - File context from FileContext enum
* @returns {string} Storage strategy to use (e.g., 'local', 's3', 'azure')
*
* @example
* // Legacy single strategy
* getFileStrategy({ fileStrategy: 's3' }) // Returns 's3'
*
* @example
* // Granular strategies
* getFileStrategy(
* {
* fileStrategy: 's3',
* fileStrategies: { avatar: 'local', document: 's3' }
* },
* { isAvatar: true }
* ) // Returns 'local'
*/
function getFileStrategy(appLocals, { isAvatar = false, isImage = false, context = null } = {}) {
// Handle both old (config object) and new (app.locals object) calling patterns
const isAppLocals = appLocals.fileStrategy !== undefined;
const config = isAppLocals ? appLocals.config : appLocals;
const fileStrategy = isAppLocals ? appLocals.fileStrategy : appLocals.fileStrategy;
// Fallback to legacy single strategy if no granular config
if (!config?.fileStrategies) {
return fileStrategy || 'local'; // Default to 'local' if undefined
}
const strategies = config.fileStrategies;
const defaultStrategy = strategies.default || fileStrategy || 'local';
// Priority order for strategy selection:
// 1. Specific file type strategy
// 2. Default strategy from fileStrategies
// 3. Legacy fileStrategy
// 4. 'local' as final fallback
let selectedStrategy;
if (isAvatar || context === FileContext.avatar) {
selectedStrategy = strategies.avatar || defaultStrategy;
} else if (isImage || context === FileContext.image_generation) {
selectedStrategy = strategies.image || defaultStrategy;
} else {
// All other files (documents, attachments, etc.)
selectedStrategy = strategies.document || defaultStrategy;
}
return selectedStrategy || 'local'; // Final fallback to 'local'
}
module.exports = { getFileStrategy };
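
A further usage sketch for the removed getFileStrategy: as written, any argument with fileStrategy defined is treated as app.locals, so the granular lookup only runs when fileStrategies is nested under a config property; otherwise the function falls back to the single legacy strategy. The concrete strategy values below are illustrative.

// Illustrative only: app.locals-shaped argument with granular strategies.
const appLocals = {
  fileStrategy: 's3',
  config: { fileStrategies: { default: 'azure', avatar: 'local', image: 's3' } },
};

getFileStrategy(appLocals, { isAvatar: true }); // => 'local' (avatar strategy wins)
getFileStrategy(appLocals, { isImage: true });  // => 's3'    (image strategy)
getFileStrategy(appLocals, {});                 // => 'azure' (documents fall back to fileStrategies.default)
getFileStrategy({ fileStrategy: 's3' }, {});    // => 's3'    (no granular config, legacy single strategy)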

View File

@@ -1,10 +1,10 @@
const fs = require('fs');
const { isEnabled } = require('@librechat/api');
const LdapStrategy = require('passport-ldapauth');
const { SystemRoles } = require('librechat-data-provider');
const { logger } = require('@librechat/data-schemas');
const { SystemRoles, ErrorTypes } = require('librechat-data-provider');
const { createUser, findUser, updateUser, countUsers } = require('~/models');
const { getBalanceConfig } = require('~/server/services/Config');
const { isEnabled } = require('~/server/utils');
const {
LDAP_URL,
@@ -90,14 +90,6 @@ const ldapLogin = new LdapStrategy(ldapOptions, async (userinfo, done) => {
(LDAP_ID && userinfo[LDAP_ID]) || userinfo.uid || userinfo.sAMAccountName || userinfo.mail;
let user = await findUser({ ldapId });
if (user && user.provider !== 'ldap') {
logger.info(
`[ldapStrategy] User ${user.email} already exists with provider ${user.provider}`,
);
return done(null, false, {
message: ErrorTypes.AUTH_FAILED,
});
}
const fullNameAttributes = LDAP_FULL_NAME && LDAP_FULL_NAME.split(',');
const fullName =

View File

@@ -3,9 +3,9 @@ const fetch = require('node-fetch');
const passport = require('passport');
const client = require('openid-client');
const jwtDecode = require('jsonwebtoken/decode');
const { CacheKeys } = require('librechat-data-provider');
const { HttpsProxyAgent } = require('https-proxy-agent');
const { hashToken, logger } = require('@librechat/data-schemas');
const { CacheKeys, ErrorTypes } = require('librechat-data-provider');
const { Strategy: OpenIDStrategy } = require('openid-client/passport');
const { isEnabled, safeStringify, logHeaders } = require('@librechat/api');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
@@ -320,14 +320,6 @@ async function setupOpenId() {
} for openidId: ${claims.sub}`,
);
}
if (user != null && user.provider !== 'openid') {
logger.info(
`[openidStrategy] Attempted OpenID login by user ${user.email}, was registered with "${user.provider}" provider`,
);
return done(null, false, {
message: ErrorTypes.AUTH_FAILED,
});
}
const userinfo = {
...claims,
...(await getUserInfo(openidConfig, tokenset.access_token, claims.sub)),
@@ -381,7 +373,6 @@ async function setupOpenId() {
email: userinfo.email || '',
emailVerified: userinfo.email_verified || false,
name: fullName,
idOnTheSource: userinfo.oid,
};
const balanceConfig = await getBalanceConfig();
@@ -392,7 +383,6 @@ async function setupOpenId() {
user.openidId = userinfo.sub;
user.username = username;
user.name = fullName;
user.idOnTheSource = userinfo.oid;
}
if (!!userinfo && userinfo.picture && !user.avatar?.includes('manual=true')) {

View File

@@ -1,8 +1,7 @@
const fetch = require('node-fetch');
const jwtDecode = require('jsonwebtoken/decode');
const { ErrorTypes } = require('librechat-data-provider');
const { findUser, createUser, updateUser } = require('~/models');
const { setupOpenId } = require('./openidStrategy');
const { findUser, createUser, updateUser } = require('~/models');
// --- Mocks ---
jest.mock('node-fetch');
@@ -51,7 +50,7 @@ jest.mock('openid-client', () => {
issuer: 'https://fake-issuer.com',
// Add any other properties needed by the implementation
}),
fetchUserInfo: jest.fn().mockImplementation(() => {
fetchUserInfo: jest.fn().mockImplementation((config, accessToken, sub) => {
// Only return additional properties, but don't override any claims
return Promise.resolve({});
}),
@@ -262,20 +261,17 @@ describe('setupOpenId', () => {
});
it('should update an existing user on login', async () => {
// Arrange: simulate that a user already exists with openid provider
// Arrange: simulate that a user already exists
const existingUser = {
_id: 'existingUserId',
provider: 'openid',
provider: 'local',
email: tokenset.claims().email,
openidId: '',
username: '',
name: '',
};
findUser.mockImplementation(async (query) => {
if (
query.openidId === tokenset.claims().sub ||
(query.email === tokenset.claims().email && query.provider === 'openid')
) {
if (query.openidId === tokenset.claims().sub || query.email === tokenset.claims().email) {
return existingUser;
}
return null;
@@ -298,38 +294,12 @@ describe('setupOpenId', () => {
);
});
it('should block login when email exists with different provider', async () => {
// Arrange: simulate that a user exists with same email but different provider
const existingUser = {
_id: 'existingUserId',
provider: 'google',
email: tokenset.claims().email,
googleId: 'some-google-id',
username: 'existinguser',
name: 'Existing User',
};
findUser.mockImplementation(async (query) => {
if (query.email === tokenset.claims().email && !query.provider) {
return existingUser;
}
return null;
});
// Act
const result = await validate(tokenset);
// Assert: verify that the strategy rejects login
expect(result.user).toBe(false);
expect(result.details.message).toBe(ErrorTypes.AUTH_FAILED);
expect(createUser).not.toHaveBeenCalled();
expect(updateUser).not.toHaveBeenCalled();
});
it('should enforce the required role and reject login if missing', async () => {
// Arrange: simulate a token without the required role.
jwtDecode.mockReturnValue({
roles: ['SomeOtherRole'],
});
const userinfo = tokenset.claims();
// Act
const { user, details } = await validate(tokenset);
@@ -340,6 +310,9 @@ describe('setupOpenId', () => {
});
it('should attempt to download and save the avatar if picture is provided', async () => {
// Arrange: ensure userinfo contains a picture URL
const userinfo = tokenset.claims();
// Act
const { user } = await validate(tokenset);

View File

@@ -22,12 +22,9 @@ const handleExistingUser = async (oldUser, avatarUrl) => {
const isLocal = fileStrategy === FileSources.local;
let updatedAvatar = false;
const hasManualFlag =
typeof oldUser?.avatar === 'string' && oldUser.avatar.includes('?manual=true');
if (isLocal && (!oldUser?.avatar || !hasManualFlag)) {
if (isLocal && (oldUser.avatar === null || !oldUser.avatar.includes('?manual=true'))) {
updatedAvatar = avatarUrl;
} else if (!isLocal && (!oldUser?.avatar || !hasManualFlag)) {
} else if (!isLocal && (oldUser.avatar === null || !oldUser.avatar.includes('?manual=true'))) {
const userId = oldUser._id;
const resizedBuffer = await resizeAvatar({
userId,
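
A small sketch of the difference between the two guards in the hunk above: the typeof oldUser?.avatar === 'string' form tolerates both null and undefined avatars, while oldUser.avatar === null || !oldUser.avatar.includes('?manual=true') throws a TypeError when avatar is undefined. The sample users are made up.

// Illustrative only: null/undefined-safe manual-avatar check (first form in the hunk).
const hasManualFlag = (user) =>
  typeof user?.avatar === 'string' && user.avatar.includes('?manual=true');

hasManualFlag({ _id: 'u1' });                                              // => false (avatar undefined, no throw)
hasManualFlag({ _id: 'u2', avatar: '/images/u2/avatar.png?manual=true' }); // => true

// The strict-null form would throw on the first user, because undefined has no .includes method:
//   user.avatar === null || !user.avatar.includes('?manual=true')  // TypeError when avatar is undefined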

Some files were not shown because too many files have changed in this diff.