Compare commits
feat/compo...feat/merma (78 commits)
Commit SHAs (78):
f67dd1b1b7, 4136dda7c7, c53bdc1fef, 170cc340d8, f1b29ffb45, 6aa4bb5a4a,
9f44187351, d2e1ca4c4a, 8e869f2274, 2e1874e596, 929b433662, 1e4f1f780c,
4733f10e41, 110984b48f, 19320f2296, 8523074e87, e4531d682d, 4bbdc4c402,
8ca4cf3d2f, 13a9bcdd48, 4b32ec42c6, 4918899c8d, 7e37211458, e57fc83d40,
550610dba9, 916cd46221, 12b08183ff, f4d97e1672, 035fa081c1, aecf8f19a6,
35f548a94d, e60c0cf201, 5b392f9cb0, e0f468da20, 91a2df4759, 97a99985fa,
3554625a06, a37bf6719c, e513f50c08, f5511e4a4e, a288ad1d9c, 458580ec87,
4285d5841c, 5ee55cda4f, 404d40cbef, f4680b016c, 077224b351, 9c70d1db96,
543281da6c, 24800bfbeb, 07e08143e4, 8ba61a86f4, 56ad92fb1c, 1ceb52d2b5,
5d267aa8e2, 59d00e99f3, 738d04fac4, 8a5dbac0f9, 434289fe92, a648ad3d13,
55d63caaf4, 313539d1ed, f869d772f7, 20100e120b, 3f3cfefc52, 3e1591d404,
1060ae8040, dd67e463e4, d60ad61325, 452151e408, 33b4a97b42, 9cdc62b655,
799f0e5810, cbda3cb529, 3ab1bd65e5, c551ba21f5, c87422a1e0, b169306096
.env.example (19 changes)
@@ -142,10 +142,10 @@ GOOGLE_KEY=user_provided
 # GOOGLE_AUTH_HEADER=true
 
 # Gemini API (AI Studio)
-# GOOGLE_MODELS=gemini-2.5-pro-preview-05-06,gemini-2.5-flash-preview-04-17,gemini-2.0-flash-001,gemini-2.0-flash-exp,gemini-2.0-flash-lite-001,gemini-1.5-pro-002,gemini-1.5-flash-002
+# GOOGLE_MODELS=gemini-2.5-pro,gemini-2.5-flash,gemini-2.5-flash-lite-preview-06-17,gemini-2.0-flash,gemini-2.0-flash-lite
 
 # Vertex AI
-# GOOGLE_MODELS=gemini-2.5-pro-preview-05-06,gemini-2.5-flash-preview-04-17,gemini-2.0-flash-001,gemini-2.0-flash-exp,gemini-2.0-flash-lite-001,gemini-1.5-pro-002,gemini-1.5-flash-002
+# GOOGLE_MODELS=gemini-2.5-pro,gemini-2.5-flash,gemini-2.5-flash-lite-preview-06-17,gemini-2.0-flash-001,gemini-2.0-flash-lite-001
 
 # GOOGLE_TITLE_MODEL=gemini-2.0-flash-lite-001
@@ -349,6 +349,11 @@ REGISTRATION_VIOLATION_SCORE=1
 CONCURRENT_VIOLATION_SCORE=1
 MESSAGE_VIOLATION_SCORE=1
 NON_BROWSER_VIOLATION_SCORE=20
+TTS_VIOLATION_SCORE=0
+STT_VIOLATION_SCORE=0
+FORK_VIOLATION_SCORE=0
+IMPORT_VIOLATION_SCORE=0
+FILE_UPLOAD_VIOLATION_SCORE=0
 
 LOGIN_MAX=7
 LOGIN_WINDOW=5
@@ -453,8 +458,8 @@ OPENID_REUSE_TOKENS=
 OPENID_JWKS_URL_CACHE_ENABLED=
 OPENID_JWKS_URL_CACHE_TIME= # 600000 ms eq to 10 minutes leave empty to disable caching
 #Set to true to trigger token exchange flow to acquire access token for the userinfo endpoint.
-OPENID_ON_BEHALF_FLOW_FOR_USERINFRO_REQUIRED=
-OPENID_ON_BEHALF_FLOW_USERINFRO_SCOPE = "user.read" # example for Scope Needed for Microsoft Graph API
+OPENID_ON_BEHALF_FLOW_FOR_USERINFO_REQUIRED=
+OPENID_ON_BEHALF_FLOW_USERINFO_SCOPE="user.read" # example for Scope Needed for Microsoft Graph API
 # Set to true to use the OpenID Connect end session endpoint for logout
 OPENID_USE_END_SESSION_ENDPOINT=
@@ -575,6 +580,10 @@ ALLOW_SHARED_LINKS_PUBLIC=true
 # If you have another service in front of your LibreChat doing compression, disable express based compression here
 # DISABLE_COMPRESSION=true
 
+# If you have gzipped versions of uploaded images in the same folder, this will enable gzip scan and serving of these images
+# Note: The images folder will be scanned on startup and a map kept in memory. Be careful with a large number of images.
+# ENABLE_IMAGE_OUTPUT_GZIP_SCAN=true
+
 #===================================================#
 #                        UI                         #
 #===================================================#
@@ -657,4 +666,4 @@ OPENWEATHER_API_KEY=
 # Reranker (Required)
 # JINA_API_KEY=your_jina_api_key
-# or
-# COHERE_API_KEY=your_cohere_api_key
+# COHERE_API_KEY=your_cohere_api_key
+
.gitignore (vendored, 3 changes)
@@ -56,6 +56,7 @@ bower_components/
 .clineignore
 .cursor
 .aider*
+CLAUDE.md
 
 # Floobits
 .floo
@@ -124,4 +125,4 @@ helm/**/.values.yaml
 !/client/src/@types/i18next.d.ts
 
 # SAML Idp cert
-*.cert
+*.cert
Dockerfile

@@ -1,4 +1,4 @@
-# v0.7.8
+# v0.7.9-rc1
 
 # Base node image
 FROM node:20-alpine AS node
Dockerfile.multi

@@ -1,5 +1,5 @@
 # Dockerfile.multi
-# v0.7.8
+# v0.7.9-rc1
 
 # Base for all builds
 FROM node:20-alpine AS base-min
README.md

@@ -52,7 +52,7 @@
 - 🖥️ **UI & Experience** inspired by ChatGPT with enhanced design and features
 
 - 🤖 **AI Model Selection**:
-  - Anthropic (Claude), AWS Bedrock, OpenAI, Azure OpenAI, Google, Vertex AI, OpenAI Assistants API (incl. Azure)
+  - Anthropic (Claude), AWS Bedrock, OpenAI, Azure OpenAI, Google, Vertex AI, OpenAI Responses API (incl. Azure)
   - [Custom Endpoints](https://www.librechat.ai/docs/quick_start/custom_endpoints): Use any OpenAI-compatible API with LibreChat, no proxy required
   - Compatible with [Local & Remote AI Providers](https://www.librechat.ai/docs/configuration/librechat_yaml/ai_endpoints):
     - Ollama, groq, Cohere, Mistral AI, Apple MLX, koboldcpp, together.ai,

@@ -66,10 +66,9 @@
 - 🔦 **Agents & Tools Integration**:
   - **[LibreChat Agents](https://www.librechat.ai/docs/features/agents)**:
     - No-Code Custom Assistants: Build specialized, AI-driven helpers without coding
-    - Flexible & Extensible: Attach tools like DALL-E-3, file search, code execution, and more
-    - Compatible with Custom Endpoints, OpenAI, Azure, Anthropic, AWS Bedrock, and more
+    - Flexible & Extensible: Use MCP Servers, tools, file search, code execution, and more
+    - Compatible with Custom Endpoints, OpenAI, Azure, Anthropic, AWS Bedrock, Google, Vertex AI, Responses API, and more
   - [Model Context Protocol (MCP) Support](https://modelcontextprotocol.io/clients#librechat) for Tools
-  - Use LibreChat Agents and OpenAI Assistants with Files, Code Interpreter, Tools, and API Actions
 
 - 🔍 **Web Search**:
   - Search the internet and retrieve relevant information to enhance your AI context
api/app/clients/BaseClient.js

@@ -13,7 +13,6 @@ const {
 const { getMessages, saveMessage, updateMessage, saveConvo, getConvo } = require('~/models');
 const { checkBalance } = require('~/models/balanceMethods');
 const { truncateToolCallOutputs } = require('./prompts');
-const { addSpaceIfNeeded } = require('~/server/utils');
 const { getFiles } = require('~/models/File');
 const TextStream = require('./TextStream');
 const { logger } = require('~/config');
@@ -572,7 +571,7 @@ class BaseClient {
       });
     }
 
-    const { generation = '' } = opts;
+    const { editedContent } = opts;
 
     // It's not necessary to push to currentMessages
     // depending on subclass implementation of handling messages
@@ -587,11 +586,21 @@ class BaseClient {
           isCreatedByUser: false,
           model: this.modelOptions?.model ?? this.model,
           sender: this.sender,
           text: generation,
         };
         this.currentMessages.push(userMessage, latestMessage);
+      } else if (editedContent != null) {
+        // Handle editedContent for content parts
+        if (editedContent && latestMessage.content && Array.isArray(latestMessage.content)) {
+          const { index, text, type } = editedContent;
+          if (index >= 0 && index < latestMessage.content.length) {
+            const contentPart = latestMessage.content[index];
+            if (type === ContentTypes.THINK && contentPart.type === ContentTypes.THINK) {
+              contentPart[ContentTypes.THINK] = text;
+            } else if (type === ContentTypes.TEXT && contentPart.type === ContentTypes.TEXT) {
+              contentPart[ContentTypes.TEXT] = text;
+            }
+          }
+        }
       } else {
         latestMessage.text = generation;
       }
       this.continued = true;
     } else {
@@ -672,16 +681,32 @@ class BaseClient {
     };
 
     if (typeof completion === 'string') {
-      responseMessage.text = addSpaceIfNeeded(generation) + completion;
+      responseMessage.text = completion;
     } else if (
       Array.isArray(completion) &&
       (this.clientName === EModelEndpoint.agents ||
         isParamEndpoint(this.options.endpoint, this.options.endpointType))
     ) {
       responseMessage.text = '';
-      responseMessage.content = completion;
+
+      if (!opts.editedContent || this.currentMessages.length === 0) {
+        responseMessage.content = completion;
+      } else {
+        const latestMessage = this.currentMessages[this.currentMessages.length - 1];
+        if (!latestMessage?.content) {
+          responseMessage.content = completion;
+        } else {
+          const existingContent = [...latestMessage.content];
+          const { type: editedType } = opts.editedContent;
+          responseMessage.content = this.mergeEditedContent(
+            existingContent,
+            completion,
+            editedType,
+          );
+        }
+      }
     } else if (Array.isArray(completion)) {
-      responseMessage.text = addSpaceIfNeeded(generation) + completion.join('');
+      responseMessage.text = completion.join('');
     }
 
     if (
@@ -1095,6 +1120,50 @@ class BaseClient {
     return numTokens;
   }
 
+  /**
+   * Merges completion content with existing content when editing TEXT or THINK types
+   * @param {Array} existingContent - The existing content array
+   * @param {Array} newCompletion - The new completion content
+   * @param {string} editedType - The type of content being edited
+   * @returns {Array} The merged content array
+   */
+  mergeEditedContent(existingContent, newCompletion, editedType) {
+    if (!newCompletion.length) {
+      return existingContent.concat(newCompletion);
+    }
+
+    if (editedType !== ContentTypes.TEXT && editedType !== ContentTypes.THINK) {
+      return existingContent.concat(newCompletion);
+    }
+
+    const lastIndex = existingContent.length - 1;
+    const lastExisting = existingContent[lastIndex];
+    const firstNew = newCompletion[0];
+
+    if (lastExisting?.type !== firstNew?.type || firstNew?.type !== editedType) {
+      return existingContent.concat(newCompletion);
+    }
+
+    const mergedContent = [...existingContent];
+    if (editedType === ContentTypes.TEXT) {
+      mergedContent[lastIndex] = {
+        ...mergedContent[lastIndex],
+        [ContentTypes.TEXT]:
+          (mergedContent[lastIndex][ContentTypes.TEXT] || '') + (firstNew[ContentTypes.TEXT] || ''),
+      };
+    } else {
+      mergedContent[lastIndex] = {
+        ...mergedContent[lastIndex],
+        [ContentTypes.THINK]:
+          (mergedContent[lastIndex][ContentTypes.THINK] || '') +
+          (firstNew[ContentTypes.THINK] || ''),
+      };
+    }
+
+    // Add remaining completion items
+    return mergedContent.concat(newCompletion.slice(1));
+  }
+
   async sendPayload(payload, opts = {}) {
     if (opts && typeof opts === 'object') {
       this.setOptions(opts);
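To illustrate the merge semantics of mergeEditedContent above, here is a standalone sketch (not part of the diff) that inlines just the TEXT branch; ContentTypes.TEXT is assumed to be the string 'text' as in librechat-data-provider:

// Illustrative sketch only: the TEXT branch of mergeEditedContent as a
// standalone function.
const TEXT = 'text'; // assumed value of ContentTypes.TEXT

function mergeEditedText(existingContent, newCompletion) {
  if (!newCompletion.length) {
    return existingContent.concat(newCompletion);
  }
  const lastIndex = existingContent.length - 1;
  const lastExisting = existingContent[lastIndex];
  const firstNew = newCompletion[0];
  // Only merge when both boundary parts are text parts; otherwise append as-is.
  if (lastExisting?.type !== TEXT || firstNew?.type !== TEXT) {
    return existingContent.concat(newCompletion);
  }
  const merged = [...existingContent];
  merged[lastIndex] = {
    ...merged[lastIndex],
    [TEXT]: (merged[lastIndex][TEXT] || '') + (firstNew[TEXT] || ''),
  };
  return merged.concat(newCompletion.slice(1));
}

// Editing "The answer is" and regenerating " 42." yields one contiguous part:
console.log(mergeEditedText(
  [{ type: 'text', text: 'The answer is' }],
  [{ type: 'text', text: ' 42.' }],
)); // => [ { type: 'text', text: 'The answer is 42.' } ]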
api/app/clients/GoogleClient.js

@@ -1,7 +1,7 @@
 const { google } = require('googleapis');
-const { Tokenizer } = require('@librechat/api');
 const { concat } = require('@langchain/core/utils/stream');
 const { ChatVertexAI } = require('@langchain/google-vertexai');
+const { Tokenizer, getSafetySettings } = require('@librechat/api');
 const { ChatGoogleGenerativeAI } = require('@langchain/google-genai');
 const { GoogleGenerativeAI: GenAI } = require('@google/generative-ai');
 const { HumanMessage, SystemMessage } = require('@langchain/core/messages');
@@ -12,13 +12,13 @@ const {
   endpointSettings,
   parseTextParts,
   EModelEndpoint,
+  googleSettings,
   ContentTypes,
   VisionModes,
   ErrorTypes,
   Constants,
   AuthKeys,
 } = require('librechat-data-provider');
-const { getSafetySettings } = require('~/server/services/Endpoints/google/llm');
 const { encodeAndFormat } = require('~/server/services/Files/images');
 const { spendTokens } = require('~/models/spendTokens');
 const { getModelMaxTokens } = require('~/utils');
@@ -166,6 +166,16 @@ class GoogleClient extends BaseClient {
       );
     }
 
+    // Add thinking configuration
+    this.modelOptions.thinkingConfig = {
+      thinkingBudget:
+        (this.modelOptions.thinking ?? googleSettings.thinking.default)
+          ? this.modelOptions.thinkingBudget
+          : 0,
+    };
+    delete this.modelOptions.thinking;
+    delete this.modelOptions.thinkingBudget;
+
     this.sender =
       this.options.sender ??
       getResponseSender({
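The conditional above is easy to misread; its net effect is sketched below (illustrative only, assuming googleSettings.thinking.default is a boolean default for whether thinking is enabled):

// Illustrative only: what the thinkingConfig block above produces.
const defaults = { thinking: true }; // stand-in for googleSettings.thinking.default
const modelOptions = { thinking: false, thinkingBudget: 1024 };

modelOptions.thinkingConfig = {
  // When thinking is disabled (explicitly, or by default), the budget collapses to 0.
  thinkingBudget: (modelOptions.thinking ?? defaults.thinking) ? modelOptions.thinkingBudget : 0,
};
delete modelOptions.thinking;
delete modelOptions.thinkingBudget;

console.log(modelOptions); // => { thinkingConfig: { thinkingBudget: 0 } }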
api/app/clients/prompts/createContextHandlers.js

@@ -1,6 +1,7 @@
 const axios = require('axios');
-const { isEnabled } = require('~/server/utils');
-const { logger } = require('~/config');
+const { isEnabled } = require('@librechat/api');
+const { logger } = require('@librechat/data-schemas');
+const { generateShortLivedToken } = require('~/server/services/AuthService');
 
 const footer = `Use the context as your learned knowledge to better answer the user.

@@ -18,7 +19,7 @@ function createContextHandlers(req, userMessageContent) {
   const queryPromises = [];
   const processedFiles = [];
   const processedIds = new Set();
-  const jwtToken = req.headers.authorization.split(' ')[1];
+  const jwtToken = generateShortLivedToken(req.user.id);
   const useFullContext = isEnabled(process.env.RAG_USE_FULL_CONTEXT);
 
   const query = async (file) => {
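Both this file and the file-search tool below swap the user's forwarded session JWT for a server-minted token. The body of generateShortLivedToken is not part of this diff; a plausible sketch follows, in which the payload shape, JWT_SECRET, and the 5-minute expiry are assumptions:

// Hypothetical sketch; the real implementation lives in
// ~/server/services/AuthService and is not shown in this diff.
const jwt = require('jsonwebtoken');

function generateShortLivedToken(userId) {
  // Mint a narrowly scoped, short-lived token for the RAG API call
  // instead of forwarding the user's own session token.
  return jwt.sign({ id: userId }, process.env.JWT_SECRET, { expiresIn: '5m' });
}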
api/app/clients/tools/util/fileSearch.js

@@ -1,26 +1,35 @@
 const { z } = require('zod');
 const axios = require('axios');
 const { tool } = require('@langchain/core/tools');
+const { logger } = require('@librechat/data-schemas');
 const { Tools, EToolResources } = require('librechat-data-provider');
+const { generateShortLivedToken } = require('~/server/services/AuthService');
 const { getFiles } = require('~/models/File');
-const { logger } = require('~/config');
 
 /**
  *
  * @param {Object} options
  * @param {ServerRequest} options.req
  * @param {Agent['tool_resources']} options.tool_resources
+ * @param {string} [options.agentId] - The agent ID for file access control
  * @returns {Promise<{
  *   files: Array<{ file_id: string; filename: string }>,
  *   toolContext: string
  * }>}
  */
 const primeFiles = async (options) => {
-  const { tool_resources } = options;
+  const { tool_resources, req, agentId } = options;
   const file_ids = tool_resources?.[EToolResources.file_search]?.file_ids ?? [];
   const agentResourceIds = new Set(file_ids);
   const resourceFiles = tool_resources?.[EToolResources.file_search]?.files ?? [];
-  const dbFiles = ((await getFiles({ file_id: { $in: file_ids } })) ?? []).concat(resourceFiles);
+  const dbFiles = (
+    (await getFiles(
+      { file_id: { $in: file_ids } },
+      null,
+      { text: 0 },
+      { userId: req?.user?.id, agentId },
+    )) ?? []
+  ).concat(resourceFiles);
 
   let toolContext = `- Note: Semantic search is available through the ${Tools.file_search} tool but no files are currently loaded. Request the user to upload documents to search through.`;

@@ -59,7 +68,7 @@ const createFileSearchTool = async ({ req, files, entity_id }) => {
   if (files.length === 0) {
     return 'No files to search. Instruct the user to add files for the search.';
   }
-  const jwtToken = req.headers.authorization.split(' ')[1];
+  const jwtToken = generateShortLivedToken(req.user.id);
   if (!jwtToken) {
     return 'There was an error authenticating the file search request.';
   }
api/app/clients/tools/util/handleTools.js

@@ -245,7 +245,13 @@ const loadTools = async ({
         authFields: [EnvVar.CODE_API_KEY],
       });
       const codeApiKey = authValues[EnvVar.CODE_API_KEY];
-      const { files, toolContext } = await primeCodeFiles(options, codeApiKey);
+      const { files, toolContext } = await primeCodeFiles(
+        {
+          ...options,
+          agentId: agent?.id,
+        },
+        codeApiKey,
+      );
       if (toolContext) {
         toolContextMap[tool] = toolContext;
       }

@@ -260,7 +266,10 @@ const loadTools = async ({
       continue;
     } else if (tool === Tools.file_search) {
       requestedTools[tool] = async () => {
-        const { files, toolContext } = await primeSearchFiles(options);
+        const { files, toolContext } = await primeSearchFiles({
+          ...options,
+          agentId: agent?.id,
+        });
         if (toolContext) {
           toolContextMap[tool] = toolContext;
         }
api/cache/banViolation.js (vendored, 3 changes)

@@ -1,7 +1,8 @@
 const { logger } = require('@librechat/data-schemas');
+const { isEnabled, math } = require('@librechat/api');
 const { ViolationTypes } = require('librechat-data-provider');
-const { isEnabled, math, removePorts } = require('~/server/utils');
 const { deleteAllUserSessions } = require('~/models');
+const { removePorts } = require('~/server/utils');
 const getLogStores = require('./getLogStores');
 
 const { BAN_VIOLATIONS, BAN_INTERVAL } = process.env ?? {};
api/cache/getLogStores.js (vendored, 2 changes)

@@ -1,7 +1,7 @@
 const { Keyv } = require('keyv');
+const { isEnabled, math } = require('@librechat/api');
 const { CacheKeys, ViolationTypes, Time } = require('librechat-data-provider');
 const { logFile, violationFile } = require('./keyvFiles');
-const { isEnabled, math } = require('~/server/utils');
 const keyvRedis = require('./keyvRedis');
 const keyvMongo = require('./keyvMongo');
api/cache/logViolation.js (vendored, 2 changes)

@@ -9,7 +9,7 @@ const banViolation = require('./banViolation');
  * @param {Object} res - Express response object.
  * @param {string} type - The type of violation.
  * @param {Object} errorMessage - The error message to log.
- * @param {number} [score=1] - The severity of the violation. Defaults to 1
+ * @param {number | string} [score=1] - The severity of the violation. Defaults to 1
  */
 const logViolation = async (req, res, type, errorMessage, score = 1) => {
   const userId = req.user?.id ?? req.user?._id;
@@ -90,7 +90,7 @@ const loadEphemeralAgent = async ({ req, agent_id, endpoint, model_parameters: _
   }
 
   const instructions = req.body.promptPrefix;
-  return {
+  const result = {
     id: agent_id,
     instructions,
     provider: endpoint,

@@ -98,6 +98,11 @@ const loadEphemeralAgent = async ({ req, agent_id, endpoint, model_parameters: _
     model,
     tools,
   };
+
+  if (ephemeralAgent?.artifacts != null && ephemeralAgent.artifacts) {
+    result.artifacts = ephemeralAgent.artifacts;
+  }
+  return result;
 };
 
 /**
api/models/Conversation.js

@@ -1,4 +1,6 @@
 const { logger } = require('@librechat/data-schemas');
+const { createTempChatExpirationDate } = require('@librechat/api');
+const getCustomConfig = require('~/server/services/Config/getCustomConfig');
 const { getMessages, deleteMessages } = require('./Message');
 const { Conversation } = require('~/db/models');

@@ -98,10 +100,15 @@ module.exports = {
       update.conversationId = newConversationId;
     }
 
-    if (req.body.isTemporary) {
-      const expiredAt = new Date();
-      expiredAt.setDate(expiredAt.getDate() + 30);
-      update.expiredAt = expiredAt;
+    if (req?.body?.isTemporary) {
+      try {
+        const customConfig = await getCustomConfig();
+        update.expiredAt = createTempChatExpirationDate(customConfig);
+      } catch (err) {
+        logger.error('Error creating temporary chat expiration date:', err);
+        logger.info(`---\`saveConvo\` context: ${metadata?.context}`);
+        update.expiredAt = null;
+      }
     } else {
       update.expiredAt = null;
     }
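The hardcoded 30-day expiry is replaced by a config-driven helper. createTempChatExpirationDate is imported from @librechat/api and its body is not shown in this diff; a sketch of what such a helper might do follows, where the temporaryChatRetention key and its hours unit are assumptions:

// Hypothetical sketch; the real helper is imported from @librechat/api.
function createTempChatExpirationDate(customConfig) {
  // Assumed config key, expressed in hours; falls back to the old 30 days.
  const retentionHours = customConfig?.interface?.temporaryChatRetention ?? 24 * 30;
  const expiredAt = new Date();
  expiredAt.setHours(expiredAt.getHours() + retentionHours);
  return expiredAt;
}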
api/models/File.js

@@ -1,5 +1,7 @@
 const { logger } = require('@librechat/data-schemas');
-const { EToolResources, FileContext } = require('librechat-data-provider');
+const { EToolResources, FileContext, Constants } = require('librechat-data-provider');
+const { getProjectByName } = require('./Project');
+const { getAgent } = require('./Agent');
 const { File } = require('~/db/models');

@@ -12,17 +14,119 @@ const findFileById = async (file_id, options = {}) => {
   return await File.findOne({ file_id, ...options }).lean();
 };
 
+/**
+ * Checks if a user has access to multiple files through a shared agent (batch operation)
+ * @param {string} userId - The user ID to check access for
+ * @param {string[]} fileIds - Array of file IDs to check
+ * @param {string} agentId - The agent ID that might grant access
+ * @returns {Promise<Map<string, boolean>>} Map of fileId to access status
+ */
+const hasAccessToFilesViaAgent = async (userId, fileIds, agentId) => {
+  const accessMap = new Map();
+
+  // Initialize all files as no access
+  fileIds.forEach((fileId) => accessMap.set(fileId, false));
+
+  try {
+    const agent = await getAgent({ id: agentId });
+
+    if (!agent) {
+      return accessMap;
+    }
+
+    // Check if user is the author - if so, grant access to all files
+    if (agent.author.toString() === userId) {
+      fileIds.forEach((fileId) => accessMap.set(fileId, true));
+      return accessMap;
+    }
+
+    // Check if agent is shared with the user via projects
+    if (!agent.projectIds || agent.projectIds.length === 0) {
+      return accessMap;
+    }
+
+    // Check if agent is in global project
+    const globalProject = await getProjectByName(Constants.GLOBAL_PROJECT_NAME, '_id');
+    if (
+      !globalProject ||
+      !agent.projectIds.some((pid) => pid.toString() === globalProject._id.toString())
+    ) {
+      return accessMap;
+    }
+
+    // Agent is globally shared - check if it's collaborative
+    if (!agent.isCollaborative) {
+      return accessMap;
+    }
+
+    // Agent is globally shared and collaborative - check which files are actually attached
+    const attachedFileIds = new Set();
+    if (agent.tool_resources) {
+      for (const [_resourceType, resource] of Object.entries(agent.tool_resources)) {
+        if (resource?.file_ids && Array.isArray(resource.file_ids)) {
+          resource.file_ids.forEach((fileId) => attachedFileIds.add(fileId));
+        }
+      }
+    }
+
+    // Grant access only to files that are attached to this agent
+    fileIds.forEach((fileId) => {
+      if (attachedFileIds.has(fileId)) {
+        accessMap.set(fileId, true);
+      }
+    });
+
+    return accessMap;
+  } catch (error) {
+    logger.error('[hasAccessToFilesViaAgent] Error checking file access:', error);
+    return accessMap;
+  }
+};
+
 /**
  * Retrieves files matching a given filter, sorted by the most recently updated.
  * @param {Object} filter - The filter criteria to apply.
  * @param {Object} [_sortOptions] - Optional sort parameters.
  * @param {Object|String} [selectFields={ text: 0 }] - Fields to include/exclude in the query results.
  *   Default excludes the 'text' field.
+ * @param {Object} [options] - Additional options
+ * @param {string} [options.userId] - User ID for access control
+ * @param {string} [options.agentId] - Agent ID that might grant access to files
  * @returns {Promise<Array<MongoFile>>} A promise that resolves to an array of file documents.
  */
-const getFiles = async (filter, _sortOptions, selectFields = { text: 0 }) => {
+const getFiles = async (filter, _sortOptions, selectFields = { text: 0 }, options = {}) => {
   const sortOptions = { updatedAt: -1, ..._sortOptions };
-  return await File.find(filter).select(selectFields).sort(sortOptions).lean();
+  const files = await File.find(filter).select(selectFields).sort(sortOptions).lean();
+
+  // If userId and agentId are provided, filter files based on access
+  if (options.userId && options.agentId) {
+    // Collect file IDs that need access check
+    const filesToCheck = [];
+    const ownedFiles = [];
+
+    for (const file of files) {
+      if (file.user && file.user.toString() === options.userId) {
+        ownedFiles.push(file);
+      } else {
+        filesToCheck.push(file);
+      }
+    }
+
+    if (filesToCheck.length === 0) {
+      return ownedFiles;
+    }
+
+    // Batch check access for all non-owned files
+    const fileIds = filesToCheck.map((f) => f.file_id);
+    const accessMap = await hasAccessToFilesViaAgent(options.userId, fileIds, options.agentId);
+
+    // Filter files based on access
+    const accessibleFiles = filesToCheck.filter((file) => accessMap.get(file.file_id));
+
+    return [...ownedFiles, ...accessibleFiles];
+  }
+
+  return files;
 };

@@ -176,4 +280,5 @@ module.exports = {
   deleteFiles,
   deleteFileByFilter,
   batchUpdateFiles,
+  hasAccessToFilesViaAgent,
 };
api/models/File.spec.js (new file, 264 lines)

@@ -0,0 +1,264 @@
const mongoose = require('mongoose');
const { v4: uuidv4 } = require('uuid');
const { fileSchema } = require('@librechat/data-schemas');
const { agentSchema } = require('@librechat/data-schemas');
const { projectSchema } = require('@librechat/data-schemas');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { GLOBAL_PROJECT_NAME } = require('librechat-data-provider').Constants;
const { getFiles, createFile } = require('./File');
const { getProjectByName } = require('./Project');
const { createAgent } = require('./Agent');

let File;
let Agent;
let Project;

describe('File Access Control', () => {
  let mongoServer;

  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
    const mongoUri = mongoServer.getUri();
    File = mongoose.models.File || mongoose.model('File', fileSchema);
    Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
    Project = mongoose.models.Project || mongoose.model('Project', projectSchema);
    await mongoose.connect(mongoUri);
  });

  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
  });

  beforeEach(async () => {
    await File.deleteMany({});
    await Agent.deleteMany({});
    await Project.deleteMany({});
  });

  describe('hasAccessToFilesViaAgent', () => {
    it('should efficiently check access for multiple files at once', async () => {
      const userId = new mongoose.Types.ObjectId().toString();
      const authorId = new mongoose.Types.ObjectId().toString();
      const agentId = uuidv4();
      const fileIds = [uuidv4(), uuidv4(), uuidv4(), uuidv4()];

      // Create files
      for (const fileId of fileIds) {
        await createFile({
          user: authorId,
          file_id: fileId,
          filename: `file-${fileId}.txt`,
          filepath: `/uploads/${fileId}`,
        });
      }

      // Create agent with only first two files attached
      await createAgent({
        id: agentId,
        name: 'Test Agent',
        author: authorId,
        model: 'gpt-4',
        provider: 'openai',
        isCollaborative: true,
        tool_resources: {
          file_search: {
            file_ids: [fileIds[0], fileIds[1]],
          },
        },
      });

      // Get or create global project
      const globalProject = await getProjectByName(GLOBAL_PROJECT_NAME, '_id');

      // Share agent globally
      await Agent.updateOne({ id: agentId }, { $push: { projectIds: globalProject._id } });

      // Check access for all files
      const { hasAccessToFilesViaAgent } = require('./File');
      const accessMap = await hasAccessToFilesViaAgent(userId, fileIds, agentId);

      // Should have access only to the first two files
      expect(accessMap.get(fileIds[0])).toBe(true);
      expect(accessMap.get(fileIds[1])).toBe(true);
      expect(accessMap.get(fileIds[2])).toBe(false);
      expect(accessMap.get(fileIds[3])).toBe(false);
    });

    it('should grant access to all files when user is the agent author', async () => {
      const authorId = new mongoose.Types.ObjectId().toString();
      const agentId = uuidv4();
      const fileIds = [uuidv4(), uuidv4(), uuidv4()];

      // Create agent
      await createAgent({
        id: agentId,
        name: 'Test Agent',
        author: authorId,
        model: 'gpt-4',
        provider: 'openai',
        tool_resources: {
          file_search: {
            file_ids: [fileIds[0]], // Only one file attached
          },
        },
      });

      // Check access as the author
      const { hasAccessToFilesViaAgent } = require('./File');
      const accessMap = await hasAccessToFilesViaAgent(authorId, fileIds, agentId);

      // Author should have access to all files
      expect(accessMap.get(fileIds[0])).toBe(true);
      expect(accessMap.get(fileIds[1])).toBe(true);
      expect(accessMap.get(fileIds[2])).toBe(true);
    });

    it('should handle non-existent agent gracefully', async () => {
      const userId = new mongoose.Types.ObjectId().toString();
      const fileIds = [uuidv4(), uuidv4()];

      const { hasAccessToFilesViaAgent } = require('./File');
      const accessMap = await hasAccessToFilesViaAgent(userId, fileIds, 'non-existent-agent');

      // Should have no access to any files
      expect(accessMap.get(fileIds[0])).toBe(false);
      expect(accessMap.get(fileIds[1])).toBe(false);
    });

    it('should deny access when agent is not collaborative', async () => {
      const userId = new mongoose.Types.ObjectId().toString();
      const authorId = new mongoose.Types.ObjectId().toString();
      const agentId = uuidv4();
      const fileIds = [uuidv4(), uuidv4()];

      // Create agent with files but isCollaborative: false
      await createAgent({
        id: agentId,
        name: 'Non-Collaborative Agent',
        author: authorId,
        model: 'gpt-4',
        provider: 'openai',
        isCollaborative: false,
        tool_resources: {
          file_search: {
            file_ids: fileIds,
          },
        },
      });

      // Get or create global project
      const globalProject = await getProjectByName(GLOBAL_PROJECT_NAME, '_id');

      // Share agent globally
      await Agent.updateOne({ id: agentId }, { $push: { projectIds: globalProject._id } });

      // Check access for files
      const { hasAccessToFilesViaAgent } = require('./File');
      const accessMap = await hasAccessToFilesViaAgent(userId, fileIds, agentId);

      // Should have no access to any files when isCollaborative is false
      expect(accessMap.get(fileIds[0])).toBe(false);
      expect(accessMap.get(fileIds[1])).toBe(false);
    });
  });

  describe('getFiles with agent access control', () => {
    test('should return files owned by user and files accessible through agent', async () => {
      const authorId = new mongoose.Types.ObjectId();
      const userId = new mongoose.Types.ObjectId();
      const agentId = `agent_${uuidv4()}`;
      const ownedFileId = `file_${uuidv4()}`;
      const sharedFileId = `file_${uuidv4()}`;
      const inaccessibleFileId = `file_${uuidv4()}`;

      // Create/get global project using getProjectByName which will upsert
      const globalProject = await getProjectByName(GLOBAL_PROJECT_NAME);

      // Create agent with shared file
      await createAgent({
        id: agentId,
        name: 'Shared Agent',
        provider: 'test',
        model: 'test-model',
        author: authorId,
        projectIds: [globalProject._id],
        isCollaborative: true,
        tool_resources: {
          file_search: {
            file_ids: [sharedFileId],
          },
        },
      });

      // Create files
      await createFile({
        file_id: ownedFileId,
        user: userId,
        filename: 'owned.txt',
        filepath: '/uploads/owned.txt',
        type: 'text/plain',
        bytes: 100,
      });

      await createFile({
        file_id: sharedFileId,
        user: authorId,
        filename: 'shared.txt',
        filepath: '/uploads/shared.txt',
        type: 'text/plain',
        bytes: 200,
        embedded: true,
      });

      await createFile({
        file_id: inaccessibleFileId,
        user: authorId,
        filename: 'inaccessible.txt',
        filepath: '/uploads/inaccessible.txt',
        type: 'text/plain',
        bytes: 300,
      });

      // Get files with access control
      const files = await getFiles(
        { file_id: { $in: [ownedFileId, sharedFileId, inaccessibleFileId] } },
        null,
        { text: 0 },
        { userId: userId.toString(), agentId },
      );

      expect(files).toHaveLength(2);
      expect(files.map((f) => f.file_id)).toContain(ownedFileId);
      expect(files.map((f) => f.file_id)).toContain(sharedFileId);
      expect(files.map((f) => f.file_id)).not.toContain(inaccessibleFileId);
    });

    test('should return all files when no userId/agentId provided', async () => {
      const userId = new mongoose.Types.ObjectId();
      const fileId1 = `file_${uuidv4()}`;
      const fileId2 = `file_${uuidv4()}`;

      await createFile({
        file_id: fileId1,
        user: userId,
        filename: 'file1.txt',
        filepath: '/uploads/file1.txt',
        type: 'text/plain',
        bytes: 100,
      });

      await createFile({
        file_id: fileId2,
        user: new mongoose.Types.ObjectId(),
        filename: 'file2.txt',
        filepath: '/uploads/file2.txt',
        type: 'text/plain',
        bytes: 200,
      });

      const files = await getFiles({ file_id: { $in: [fileId1, fileId2] } });
      expect(files).toHaveLength(2);
    });
  });
});
api/models/Message.js

@@ -1,5 +1,7 @@
 const { z } = require('zod');
 const { logger } = require('@librechat/data-schemas');
+const { createTempChatExpirationDate } = require('@librechat/api');
+const getCustomConfig = require('~/server/services/Config/getCustomConfig');
 const { Message } = require('~/db/models');
 
 const idSchema = z.string().uuid();

@@ -54,9 +56,14 @@ async function saveMessage(req, params, metadata) {
     };
 
     if (req?.body?.isTemporary) {
-      const expiredAt = new Date();
-      expiredAt.setDate(expiredAt.getDate() + 30);
-      update.expiredAt = expiredAt;
+      try {
+        const customConfig = await getCustomConfig();
+        update.expiredAt = createTempChatExpirationDate(customConfig);
+      } catch (err) {
+        logger.error('Error creating temporary chat expiration date:', err);
+        logger.info(`---\`saveMessage\` context: ${metadata?.context}`);
+        update.expiredAt = null;
+      }
     } else {
       update.expiredAt = null;
     }
api/models/tx.js

@@ -135,10 +135,11 @@ const tokenValues = Object.assign(
   'grok-2-1212': { prompt: 2.0, completion: 10.0 },
   'grok-2-latest': { prompt: 2.0, completion: 10.0 },
   'grok-2': { prompt: 2.0, completion: 10.0 },
-  'grok-3-mini-fast': { prompt: 0.4, completion: 4 },
+  'grok-3-mini-fast': { prompt: 0.6, completion: 4 },
   'grok-3-mini': { prompt: 0.3, completion: 0.5 },
   'grok-3-fast': { prompt: 5.0, completion: 25.0 },
   'grok-3': { prompt: 3.0, completion: 15.0 },
+  'grok-4': { prompt: 3.0, completion: 15.0 },
   'grok-beta': { prompt: 5.0, completion: 15.0 },
   'mistral-large': { prompt: 2.0, completion: 6.0 },
   'pixtral-large': { prompt: 2.0, completion: 6.0 },
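Assuming these multipliers follow the usual convention for this table of USD per million tokens (an assumption; the unit is not stated in the hunk), the new grok-4 entry prices out as in this small sketch:

// Illustrative arithmetic, assuming the multipliers are USD per 1M tokens:
// a grok-4 call with 10,000 prompt tokens and 2,000 completion tokens.
const grok4 = { prompt: 3.0, completion: 15.0 }; // the 'grok-4' entry above
const cost = (10_000 / 1e6) * grok4.prompt + (2_000 / 1e6) * grok4.completion;
console.log(cost.toFixed(4)); // => '0.0600'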
api/models/tx.spec.js

@@ -636,6 +636,15 @@ describe('Grok Model Tests - Pricing', () => {
     );
   });
 
+  test('should return correct prompt and completion rates for Grok 4 model', () => {
+    expect(getMultiplier({ model: 'grok-4-0709', tokenType: 'prompt' })).toBe(
+      tokenValues['grok-4'].prompt,
+    );
+    expect(getMultiplier({ model: 'grok-4-0709', tokenType: 'completion' })).toBe(
+      tokenValues['grok-4'].completion,
+    );
+  });
+
   test('should return correct prompt and completion rates for Grok 3 models with prefixes', () => {
     expect(getMultiplier({ model: 'xai/grok-3', tokenType: 'prompt' })).toBe(
       tokenValues['grok-3'].prompt,

@@ -662,6 +671,15 @@ describe('Grok Model Tests - Pricing', () => {
       tokenValues['grok-3-mini-fast'].completion,
     );
   });
+
+  test('should return correct prompt and completion rates for Grok 4 model with prefixes', () => {
+    expect(getMultiplier({ model: 'xai/grok-4-0709', tokenType: 'prompt' })).toBe(
+      tokenValues['grok-4'].prompt,
+    );
+    expect(getMultiplier({ model: 'xai/grok-4-0709', tokenType: 'completion' })).toBe(
+      tokenValues['grok-4'].completion,
+    );
+  });
 });
 });
api/package.json

@@ -1,6 +1,6 @@
 {
   "name": "@librechat/backend",
-  "version": "v0.7.8",
+  "version": "v0.7.9-rc1",
   "description": "",
   "scripts": {
     "start": "echo 'please run this from the root directory'",

@@ -48,7 +48,7 @@
     "@langchain/google-genai": "^0.2.13",
     "@langchain/google-vertexai": "^0.2.13",
     "@langchain/textsplitters": "^0.1.0",
-    "@librechat/agents": "^2.4.42",
+    "@librechat/agents": "^2.4.59",
     "@librechat/api": "*",
     "@librechat/data-schemas": "*",
     "@node-saml/passport-saml": "^5.0.0",
api/server/controllers/AuthController.js

@@ -1,17 +1,17 @@
 const cookies = require('cookie');
 const jwt = require('jsonwebtoken');
 const openIdClient = require('openid-client');
+const { isEnabled } = require('@librechat/api');
+const { logger } = require('@librechat/data-schemas');
 const {
-  registerUser,
-  resetPassword,
-  setAuthTokens,
   requestPasswordReset,
   setOpenIDAuthTokens,
+  resetPassword,
+  setAuthTokens,
+  registerUser,
 } = require('~/server/services/AuthService');
 const { findUser, getUserById, deleteAllUserSessions, findSession } = require('~/models');
 const { getOpenIdConfig } = require('~/strategies');
-const { isEnabled } = require('~/server/utils');
 
 const registrationController = async (req, res) => {
   try {
api/server/controllers/EditController.js

@@ -1,3 +1,5 @@
+const { sendEvent } = require('@librechat/api');
+const { logger } = require('@librechat/data-schemas');
 const { getResponseSender } = require('librechat-data-provider');
 const {
   handleAbortError,

@@ -10,9 +12,8 @@ const {
   clientRegistry,
   requestDataMap,
 } = require('~/server/cleanup');
-const { sendMessage, createOnProgress } = require('~/server/utils');
+const { createOnProgress } = require('~/server/utils');
 const { saveMessage } = require('~/models');
-const { logger } = require('~/config');
 
 const EditController = async (req, res, next, initializeClient) => {
   let {

@@ -198,7 +199,7 @@ const EditController = async (req, res, next, initializeClient) => {
   const finalUserMessage = reqDataContext.userMessage;
   const finalResponseMessage = { ...response };
 
-  sendMessage(res, {
+  sendEvent(res, {
     final: true,
     conversation,
     title: conversation.title,
api/server/controllers/ErrorController.js

@@ -24,17 +24,23 @@ const handleValidationError = (err, res) => {
   }
 };
 
-// eslint-disable-next-line no-unused-vars
-module.exports = (err, req, res, next) => {
+module.exports = (err, _req, res, _next) => {
   try {
     if (err.name === 'ValidationError') {
-      return (err = handleValidationError(err, res));
+      return handleValidationError(err, res);
     }
     if (err.code && err.code == 11000) {
-      return (err = handleDuplicateKeyError(err, res));
+      return handleDuplicateKeyError(err, res);
     }
-  } catch (err) {
+
     // Special handling for errors like SyntaxError
     if (err.statusCode && err.body) {
       return res.status(err.statusCode).send(err.body);
     }
 
     logger.error('ErrorController => error', err);
-    res.status(500).send('An unknown error occurred.');
+    return res.status(500).send('An unknown error occurred.');
+  } catch (err) {
+    logger.error('ErrorController => processing error', err);
+    return res.status(500).send('Processing error in ErrorController.');
   }
 };
api/server/controllers/ErrorController.spec.js (new file, 241 lines)

@@ -0,0 +1,241 @@
const errorController = require('./ErrorController');
const { logger } = require('~/config');

// Mock the logger
jest.mock('~/config', () => ({
  logger: {
    error: jest.fn(),
  },
}));

describe('ErrorController', () => {
  let mockReq, mockRes, mockNext;

  beforeEach(() => {
    mockReq = {};
    mockRes = {
      status: jest.fn().mockReturnThis(),
      send: jest.fn(),
    };
    mockNext = jest.fn();
    logger.error.mockClear();
  });

  describe('ValidationError handling', () => {
    it('should handle ValidationError with single error', () => {
      const validationError = {
        name: 'ValidationError',
        errors: {
          email: { message: 'Email is required', path: 'email' },
        },
      };

      errorController(validationError, mockReq, mockRes, mockNext);

      expect(mockRes.status).toHaveBeenCalledWith(400);
      expect(mockRes.send).toHaveBeenCalledWith({
        messages: '["Email is required"]',
        fields: '["email"]',
      });
      expect(logger.error).toHaveBeenCalledWith('Validation error:', validationError.errors);
    });

    it('should handle ValidationError with multiple errors', () => {
      const validationError = {
        name: 'ValidationError',
        errors: {
          email: { message: 'Email is required', path: 'email' },
          password: { message: 'Password is required', path: 'password' },
        },
      };

      errorController(validationError, mockReq, mockRes, mockNext);

      expect(mockRes.status).toHaveBeenCalledWith(400);
      expect(mockRes.send).toHaveBeenCalledWith({
        messages: '"Email is required Password is required"',
        fields: '["email","password"]',
      });
      expect(logger.error).toHaveBeenCalledWith('Validation error:', validationError.errors);
    });

    it('should handle ValidationError with empty errors object', () => {
      const validationError = {
        name: 'ValidationError',
        errors: {},
      };

      errorController(validationError, mockReq, mockRes, mockNext);

      expect(mockRes.status).toHaveBeenCalledWith(400);
      expect(mockRes.send).toHaveBeenCalledWith({
        messages: '[]',
        fields: '[]',
      });
    });
  });

  describe('Duplicate key error handling', () => {
    it('should handle duplicate key error (code 11000)', () => {
      const duplicateKeyError = {
        code: 11000,
        keyValue: { email: 'test@example.com' },
      };

      errorController(duplicateKeyError, mockReq, mockRes, mockNext);

      expect(mockRes.status).toHaveBeenCalledWith(409);
      expect(mockRes.send).toHaveBeenCalledWith({
        messages: 'An document with that ["email"] already exists.',
        fields: '["email"]',
      });
      expect(logger.error).toHaveBeenCalledWith('Duplicate key error:', duplicateKeyError.keyValue);
    });

    it('should handle duplicate key error with multiple fields', () => {
      const duplicateKeyError = {
        code: 11000,
        keyValue: { email: 'test@example.com', username: 'testuser' },
      };

      errorController(duplicateKeyError, mockReq, mockRes, mockNext);

      expect(mockRes.status).toHaveBeenCalledWith(409);
      expect(mockRes.send).toHaveBeenCalledWith({
        messages: 'An document with that ["email","username"] already exists.',
        fields: '["email","username"]',
      });
      expect(logger.error).toHaveBeenCalledWith('Duplicate key error:', duplicateKeyError.keyValue);
    });

    it('should handle error with code 11000 as string', () => {
      const duplicateKeyError = {
        code: '11000',
        keyValue: { email: 'test@example.com' },
      };

      errorController(duplicateKeyError, mockReq, mockRes, mockNext);

      expect(mockRes.status).toHaveBeenCalledWith(409);
      expect(mockRes.send).toHaveBeenCalledWith({
        messages: 'An document with that ["email"] already exists.',
        fields: '["email"]',
      });
    });
  });

  describe('SyntaxError handling', () => {
    it('should handle errors with statusCode and body', () => {
      const syntaxError = {
        statusCode: 400,
        body: 'Invalid JSON syntax',
      };

      errorController(syntaxError, mockReq, mockRes, mockNext);

      expect(mockRes.status).toHaveBeenCalledWith(400);
      expect(mockRes.send).toHaveBeenCalledWith('Invalid JSON syntax');
    });

    it('should handle errors with different statusCode and body', () => {
      const customError = {
        statusCode: 422,
        body: { error: 'Unprocessable entity' },
      };

      errorController(customError, mockReq, mockRes, mockNext);

      expect(mockRes.status).toHaveBeenCalledWith(422);
      expect(mockRes.send).toHaveBeenCalledWith({ error: 'Unprocessable entity' });
    });

    it('should handle error with statusCode but no body', () => {
      const partialError = {
        statusCode: 400,
      };

      errorController(partialError, mockReq, mockRes, mockNext);

      expect(mockRes.status).toHaveBeenCalledWith(500);
      expect(mockRes.send).toHaveBeenCalledWith('An unknown error occurred.');
    });

    it('should handle error with body but no statusCode', () => {
      const partialError = {
        body: 'Some error message',
      };

      errorController(partialError, mockReq, mockRes, mockNext);

      expect(mockRes.status).toHaveBeenCalledWith(500);
      expect(mockRes.send).toHaveBeenCalledWith('An unknown error occurred.');
    });
  });

  describe('Unknown error handling', () => {
    it('should handle unknown errors', () => {
      const unknownError = new Error('Some unknown error');

      errorController(unknownError, mockReq, mockRes, mockNext);

      expect(mockRes.status).toHaveBeenCalledWith(500);
      expect(mockRes.send).toHaveBeenCalledWith('An unknown error occurred.');
      expect(logger.error).toHaveBeenCalledWith('ErrorController => error', unknownError);
    });

    it('should handle errors with code other than 11000', () => {
      const mongoError = {
        code: 11100,
        message: 'Some MongoDB error',
      };

      errorController(mongoError, mockReq, mockRes, mockNext);

      expect(mockRes.status).toHaveBeenCalledWith(500);
      expect(mockRes.send).toHaveBeenCalledWith('An unknown error occurred.');
      expect(logger.error).toHaveBeenCalledWith('ErrorController => error', mongoError);
    });

    it('should handle null/undefined errors', () => {
      errorController(null, mockReq, mockRes, mockNext);

      expect(mockRes.status).toHaveBeenCalledWith(500);
      expect(mockRes.send).toHaveBeenCalledWith('Processing error in ErrorController.');
      expect(logger.error).toHaveBeenCalledWith(
        'ErrorController => processing error',
        expect.any(Error),
      );
    });
  });

  describe('Catch block handling', () => {
    beforeEach(() => {
      // Restore logger mock to normal behavior for these tests
      logger.error.mockRestore();
      logger.error = jest.fn();
    });

    it('should handle errors when logger.error throws', () => {
      // Create fresh mocks for this test
      const freshMockRes = {
        status: jest.fn().mockReturnThis(),
        send: jest.fn(),
      };

      // Mock logger to throw on the first call, succeed on the second
      logger.error
        .mockImplementationOnce(() => {
          throw new Error('Logger error');
        })
        .mockImplementation(() => {});

      const testError = new Error('Test error');

      errorController(testError, mockReq, freshMockRes, mockNext);

      expect(freshMockRes.status).toHaveBeenCalledWith(500);
      expect(freshMockRes.send).toHaveBeenCalledWith('Processing error in ErrorController.');
      expect(logger.error).toHaveBeenCalledTimes(2);
    });
  });
});
api/server/controllers/PluginController.js

@@ -1,11 +1,10 @@
 const { logger } = require('@librechat/data-schemas');
-const { CacheKeys, AuthType } = require('librechat-data-provider');
+const { CacheKeys, AuthType, Constants } = require('librechat-data-provider');
 const { getCustomConfig, getCachedTools } = require('~/server/services/Config');
 const { getToolkitKey } = require('~/server/services/ToolService');
 const { getMCPManager, getFlowStateManager } = require('~/config');
 const { availableTools } = require('~/app/clients/tools');
 const { getLogStores } = require('~/cache');
-const { Constants } = require('librechat-data-provider');
 
 /**
  * Filters out duplicate plugins from the list of plugins.

@@ -140,9 +139,9 @@ function createGetServerTools() {
 const getAvailableTools = async (req, res) => {
   try {
     const cache = getLogStores(CacheKeys.CONFIG_STORE);
-    const cachedTools = await cache.get(CacheKeys.TOOLS);
-    if (cachedTools) {
-      res.status(200).json(cachedTools);
+    const cachedToolsArray = await cache.get(CacheKeys.TOOLS);
+    if (cachedToolsArray) {
+      res.status(200).json(cachedToolsArray);
       return;
     }

@@ -173,7 +172,7 @@ const getAvailableTools = async (req, res) => {
       }
     });
 
-    const toolDefinitions = await getCachedTools({ includeGlobal: true });
+    const toolDefinitions = (await getCachedTools({ includeGlobal: true })) || {};
 
     const toolsOutput = [];
     for (const plugin of authenticatedPlugins) {
api/server/controllers/agents/__tests__/v1.spec.js (new file, 195 lines)

@@ -0,0 +1,195 @@
const { duplicateAgent } = require('../v1');
const { getAgent, createAgent } = require('~/models/Agent');
const { getActions } = require('~/models/Action');
const { nanoid } = require('nanoid');

jest.mock('~/models/Agent');
jest.mock('~/models/Action');
jest.mock('nanoid');

describe('duplicateAgent', () => {
  let req, res;

  beforeEach(() => {
    req = {
      params: { id: 'agent_123' },
      user: { id: 'user_456' },
    };
    res = {
      status: jest.fn().mockReturnThis(),
      json: jest.fn(),
    };
    jest.clearAllMocks();
  });

  it('should duplicate an agent successfully', async () => {
    const mockAgent = {
      id: 'agent_123',
      name: 'Test Agent',
      description: 'Test Description',
      instructions: 'Test Instructions',
      provider: 'openai',
      model: 'gpt-4',
      tools: ['file_search'],
      actions: [],
      author: 'user_789',
      versions: [{ name: 'Test Agent', version: 1 }],
      __v: 0,
    };

    const mockNewAgent = {
      id: 'agent_new_123',
      name: 'Test Agent (1/2/23, 12:34)',
      description: 'Test Description',
      instructions: 'Test Instructions',
      provider: 'openai',
      model: 'gpt-4',
      tools: ['file_search'],
      actions: [],
      author: 'user_456',
      versions: [
        {
          name: 'Test Agent (1/2/23, 12:34)',
          description: 'Test Description',
          instructions: 'Test Instructions',
          provider: 'openai',
          model: 'gpt-4',
          tools: ['file_search'],
          actions: [],
          createdAt: new Date(),
          updatedAt: new Date(),
        },
      ],
    };

    getAgent.mockResolvedValue(mockAgent);
    getActions.mockResolvedValue([]);
    nanoid.mockReturnValue('new_123');
    createAgent.mockResolvedValue(mockNewAgent);

    await duplicateAgent(req, res);

    expect(getAgent).toHaveBeenCalledWith({ id: 'agent_123' });
    expect(getActions).toHaveBeenCalledWith({ agent_id: 'agent_123' }, true);
    expect(createAgent).toHaveBeenCalledWith(
      expect.objectContaining({
        id: 'agent_new_123',
        author: 'user_456',
        name: expect.stringContaining('Test Agent ('),
        description: 'Test Description',
        instructions: 'Test Instructions',
        provider: 'openai',
        model: 'gpt-4',
        tools: ['file_search'],
        actions: [],
      }),
    );

    expect(createAgent).toHaveBeenCalledWith(
      expect.not.objectContaining({
        versions: expect.anything(),
        __v: expect.anything(),
      }),
    );

    expect(res.status).toHaveBeenCalledWith(201);
    expect(res.json).toHaveBeenCalledWith({
      agent: mockNewAgent,
      actions: [],
    });
  });

  it('should ensure duplicated agent has clean versions array without nested fields', async () => {
    const mockAgent = {
      id: 'agent_123',
      name: 'Test Agent',
      description: 'Test Description',
      versions: [
        {
          name: 'Test Agent',
          versions: [{ name: 'Nested' }],
          __v: 1,
        },
      ],
      __v: 2,
    };

    const mockNewAgent = {
      id: 'agent_new_123',
      name: 'Test Agent (1/2/23, 12:34)',
      description: 'Test Description',
      versions: [
        {
          name: 'Test Agent (1/2/23, 12:34)',
          description: 'Test Description',
          createdAt: new Date(),
          updatedAt: new Date(),
        },
      ],
    };

    getAgent.mockResolvedValue(mockAgent);
    getActions.mockResolvedValue([]);
    nanoid.mockReturnValue('new_123');
    createAgent.mockResolvedValue(mockNewAgent);

    await duplicateAgent(req, res);

    expect(mockNewAgent.versions).toHaveLength(1);

    const firstVersion = mockNewAgent.versions[0];
    expect(firstVersion).not.toHaveProperty('versions');
    expect(firstVersion).not.toHaveProperty('__v');

    expect(mockNewAgent).not.toHaveProperty('__v');

    expect(res.status).toHaveBeenCalledWith(201);
  });

  it('should return 404 if agent not found', async () => {
    getAgent.mockResolvedValue(null);

    await duplicateAgent(req, res);

    expect(res.status).toHaveBeenCalledWith(404);
    expect(res.json).toHaveBeenCalledWith({
      error: 'Agent not found',
      status: 'error',
    });
  });

  it('should handle tool_resources.ocr correctly', async () => {
    const mockAgent = {
      id: 'agent_123',
      name: 'Test Agent',
      tool_resources: {
        ocr: { enabled: true, config: 'test' },
        other: { should: 'not be copied' },
      },
    };

    getAgent.mockResolvedValue(mockAgent);
    getActions.mockResolvedValue([]);
    nanoid.mockReturnValue('new_123');
    createAgent.mockResolvedValue({ id: 'agent_new_123' });

    await duplicateAgent(req, res);

    expect(createAgent).toHaveBeenCalledWith(
      expect.objectContaining({
        tool_resources: {
          ocr: { enabled: true, config: 'test' },
        },
      }),
    );
  });

  it('should handle errors gracefully', async () => {
    getAgent.mockRejectedValue(new Error('Database error'));

    await duplicateAgent(req, res);

    expect(res.status).toHaveBeenCalledWith(500);
    expect(res.json).toHaveBeenCalledWith({ error: 'Database error' });
  });
});
@@ -1,9 +1,12 @@
require('events').EventEmitter.defaultMaxListeners = 100;
const { logger } = require('@librechat/data-schemas');
const { DynamicStructuredTool } = require('@langchain/core/tools');
const { getBufferString, HumanMessage } = require('@langchain/core/messages');
const {
  sendEvent,
  createRun,
  Tokenizer,
  checkAccess,
  memoryInstructions,
  createMemoryProcessor,
} = require('@librechat/api');

@@ -30,20 +33,35 @@ const {
  bedrockInputSchema,
  removeNullishValues,
} = require('librechat-data-provider');
const { DynamicStructuredTool } = require('@langchain/core/tools');
const { getBufferString, HumanMessage } = require('@langchain/core/messages');
const { createGetMCPAuthMap, checkCapability } = require('~/server/services/Config');
const {
  findPluginAuthsByKeys,
  getFormattedMemories,
  deleteMemory,
  setMemory,
} = require('~/models');
const { getMCPAuthMap, checkCapability, hasCustomUserVars } = require('~/server/services/Config');
const { addCacheControl, createContextHandlers } = require('~/app/clients/prompts');
const { initializeAgent } = require('~/server/services/Endpoints/agents/agent');
const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
const { getFormattedMemories, deleteMemory, setMemory } = require('~/models');
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
const { getProviderConfig } = require('~/server/services/Endpoints');
const { checkAccess } = require('~/server/middleware/roles/access');
const BaseClient = require('~/app/clients/BaseClient');
const { getRoleByName } = require('~/models/Role');
const { loadAgent } = require('~/models/Agent');
const { getMCPManager } = require('~/config');

const omitTitleOptions = new Set([
  'stream',
  'thinking',
  'streaming',
  'clientOptions',
  'thinkingConfig',
  'thinkingBudget',
  'includeThoughts',
  'maxOutputTokens',
  'additionalModelRequestFields',
]);

/**
 * @param {ServerRequest} req
 * @param {Agent} agent

@@ -390,7 +408,12 @@ class AgentClient extends BaseClient {
      if (user.personalization?.memories === false) {
        return;
      }
      const hasAccess = await checkAccess(user, PermissionTypes.MEMORIES, [Permissions.USE]);
      const hasAccess = await checkAccess({
        user,
        permissionType: PermissionTypes.MEMORIES,
        permissions: [Permissions.USE],
        getRoleByName,
      });

      if (!hasAccess) {
        logger.debug(

@@ -508,7 +531,10 @@ class AgentClient extends BaseClient {
          messagesToProcess = [...messages.slice(-messageWindowSize)];
        }
      }
      return await this.processMemory(messagesToProcess);

      const bufferString = getBufferString(messagesToProcess);
      const bufferMessage = new HumanMessage(`# Current Chat:\n\n${bufferString}`);
      return await this.processMemory([bufferMessage]);
    } catch (error) {
      logger.error('Memory Agent failed to process memory', error);
    }

@@ -680,8 +706,6 @@ class AgentClient extends BaseClient {
      version: 'v2',
    };

    const getUserMCPAuthMap = await createGetMCPAuthMap();

    const toolSet = new Set((this.options.agent.tools ?? []).map((tool) => tool && tool.name));
    let { messages: initialMessages, indexTokenCountMap } = formatAgentMessages(
      payload,

@@ -802,10 +826,11 @@ class AgentClient extends BaseClient {
    }

    try {
      if (getUserMCPAuthMap) {
        config.configurable.userMCPAuthMap = await getUserMCPAuthMap({
      if (await hasCustomUserVars()) {
        config.configurable.userMCPAuthMap = await getMCPAuthMap({
          tools: agent.tools,
          userId: this.options.req.user.id,
          findPluginAuthsByKeys,
        });
      }
    } catch (err) {

@@ -1023,6 +1048,12 @@ class AgentClient extends BaseClient {
      options.llmConfig?.azureOpenAIApiInstanceName == null
    ) {
      provider = Providers.OPENAI;
    } else if (
      endpoint === EModelEndpoint.azureOpenAI &&
      options.llmConfig?.azureOpenAIApiInstanceName != null &&
      provider !== Providers.AZURE
    ) {
      provider = Providers.AZURE;
    }

    /** @type {import('@librechat/agents').ClientOptions} */

@@ -1038,6 +1069,16 @@ class AgentClient extends BaseClient {
      delete clientOptions.maxTokens;
    }

    clientOptions = Object.assign(
      Object.fromEntries(
        Object.entries(clientOptions).filter(([key]) => !omitTitleOptions.has(key)),
      ),
    );

    if (provider === Providers.GOOGLE) {
      clientOptions.json = true;
    }

    try {
      const titleResult = await this.run.generateTitle({
        provider,
@@ -1,10 +1,10 @@
// errorHandler.js
const { logger } = require('~/config');
const getLogStores = require('~/cache/getLogStores');
const { logger } = require('@librechat/data-schemas');
const { CacheKeys, ViolationTypes } = require('librechat-data-provider');
const { sendResponse } = require('~/server/middleware/error');
const { recordUsage } = require('~/server/services/Threads');
const { getConvo } = require('~/models/Conversation');
const { sendResponse } = require('~/server/utils');
const getLogStores = require('~/cache/getLogStores');

/**
 * @typedef {Object} ErrorHandlerContext

@@ -75,7 +75,7 @@ const createErrorHandler = ({ req, res, getContext, originPath = '/assistants/ch
    } else if (/Files.*are invalid/.test(error.message)) {
      const errorMessage = `Files are invalid, or may not have uploaded yet.${
        endpoint === 'azureAssistants'
          ? ' If using Azure OpenAI, files are only available in the region of the assistant\'s model at the time of upload.'
          ? " If using Azure OpenAI, files are only available in the region of the assistant's model at the time of upload."
          : ''
      }`;
      return sendResponse(req, res, messageData, errorMessage);
@@ -1,3 +1,5 @@
const { sendEvent } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { Constants } = require('librechat-data-provider');
const {
  handleAbortError,

@@ -5,17 +7,18 @@ const {
  cleanupAbortController,
} = require('~/server/middleware');
const { disposeClient, clientRegistry, requestDataMap } = require('~/server/cleanup');
const { sendMessage } = require('~/server/utils');
const { saveMessage } = require('~/models');
const { logger } = require('~/config');

const AgentController = async (req, res, next, initializeClient, addTitle) => {
  let {
    text,
    endpointOption,
    conversationId,
    isContinued = false,
    editedContent = null,
    parentMessageId = null,
    overrideParentMessageId = null,
    responseMessageId: editedResponseMessageId = null,
  } = req.body;

  let sender;

@@ -67,7 +70,7 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
          handler();
        }
      } catch (e) {
        // Ignore cleanup errors
        logger.error('[AgentController] Error in cleanup handler', e);
      }
    }
  }

@@ -155,7 +158,7 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
      try {
        res.removeListener('close', closeHandler);
      } catch (e) {
        // Ignore
        logger.error('[AgentController] Error removing close listener', e);
      }
    });

@@ -163,10 +166,14 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
      user: userId,
      onStart,
      getReqData,
      isContinued,
      editedContent,
      conversationId,
      parentMessageId,
      abortController,
      overrideParentMessageId,
      isEdited: !!editedContent,
      responseMessageId: editedResponseMessageId,
      progressOptions: {
        res,
      },

@@ -206,7 +213,7 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
    // Create a new response object with minimal copies
    const finalResponse = { ...response };

    sendMessage(res, {
    sendEvent(res, {
      final: true,
      conversation,
      title: conversation.title,
@@ -1,6 +1,8 @@
const { z } = require('zod');
const fs = require('fs').promises;
const { nanoid } = require('nanoid');
const { logger } = require('@librechat/data-schemas');
const { agentCreateSchema, agentUpdateSchema } = require('@librechat/api');
const {
  Tools,
  Constants,

@@ -8,6 +10,7 @@ const {
  SystemRoles,
  EToolResources,
  actionDelimiter,
  removeNullishValues,
} = require('librechat-data-provider');
const {
  getAgent,

@@ -30,6 +33,7 @@ const { deleteFileByFilter } = require('~/models/File');
const systemTools = {
  [Tools.execute_code]: true,
  [Tools.file_search]: true,
  [Tools.web_search]: true,
};

/**

@@ -42,9 +46,13 @@ const systemTools = {
 */
const createAgentHandler = async (req, res) => {
  try {
    const { tools = [], provider, name, description, instructions, model, ...agentData } = req.body;
    const validatedData = agentCreateSchema.parse(req.body);
    const { tools = [], ...agentData } = removeNullishValues(validatedData);

    const { id: userId } = req.user;

    agentData.id = `agent_${nanoid()}`;
    agentData.author = userId;
    agentData.tools = [];

    const availableTools = await getCachedTools({ includeGlobal: true });

@@ -58,19 +66,13 @@ const createAgentHandler = async (req, res) => {
      }
    }

    Object.assign(agentData, {
      author: userId,
      name,
      description,
      instructions,
      provider,
      model,
    });

    agentData.id = `agent_${nanoid()}`;
    const agent = await createAgent(agentData);
    res.status(201).json(agent);
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.error('[/Agents] Validation error', error.errors);
      return res.status(400).json({ error: 'Invalid request data', details: error.errors });
    }
    logger.error('[/Agents] Error creating agent', error);
    res.status(500).json({ error: error.message });
  }
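
The exact shape of `agentCreateSchema` is not shown in this diff; as a minimal sketch, assuming a plain zod object schema, unknown keys are stripped on `.parse()`, which is the mass-assignment protection the handler above relies on. Every field name below beyond `provider`, `model`, and `name` is an illustrative assumption, not the real schema:

const { z } = require('zod');

// Illustrative stand-in for agentCreateSchema (assumption, not the real schema).
// z.object() drops unknown keys when parsed, so injected fields such as
// `author`, `versions`, or `__v` never reach the handler's agentData.
const exampleCreateSchema = z.object({
  provider: z.string(),
  model: z.string(),
  name: z.string().optional(),
  description: z.string().optional(),
  instructions: z.string().optional(),
  tools: z.array(z.string()).optional(),
});

// exampleCreateSchema.parse({ provider: 'openai', model: 'gpt-4', author: 'attacker' })
// => { provider: 'openai', model: 'gpt-4' } — the injected `author` is dropped.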
@@ -154,14 +156,16 @@ const getAgentHandler = async (req, res) => {
const updateAgentHandler = async (req, res) => {
  try {
    const id = req.params.id;
    const { projectIds, removeProjectIds, ...updateData } = req.body;
    const validatedData = agentUpdateSchema.parse(req.body);
    const { projectIds, removeProjectIds, ...updateData } = removeNullishValues(validatedData);
    const isAdmin = req.user.role === SystemRoles.ADMIN;
    const existingAgent = await getAgent({ id });
    const isAuthor = existingAgent.author.toString() === req.user.id;

    if (!existingAgent) {
      return res.status(404).json({ error: 'Agent not found' });
    }

    const isAuthor = existingAgent.author.toString() === req.user.id;
    const hasEditPermission = existingAgent.isCollaborative || isAdmin || isAuthor;

    if (!hasEditPermission) {

@@ -200,6 +204,11 @@ const updateAgentHandler = async (req, res) => {

    return res.json(updatedAgent);
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.error('[/Agents/:id] Validation error', error.errors);
      return res.status(400).json({ error: 'Invalid request data', details: error.errors });
    }

    logger.error('[/Agents/:id] Error updating Agent', error);

    if (error.statusCode === 409) {

@@ -242,6 +251,8 @@ const duplicateAgentHandler = async (req, res) => {
      createdAt: _createdAt,
      updatedAt: _updatedAt,
      tool_resources: _tool_resources = {},
      versions: _versions,
      __v: _v,
      ...cloneData
    } = agent;
    cloneData.name = `${agent.name} (${new Date().toLocaleString('en-US', {

@@ -380,6 +391,22 @@ const uploadAgentAvatarHandler = async (req, res) => {
      return res.status(400).json({ message: 'Agent ID is required' });
    }

    const isAdmin = req.user.role === SystemRoles.ADMIN;
    const existingAgent = await getAgent({ id: agent_id });

    if (!existingAgent) {
      return res.status(404).json({ error: 'Agent not found' });
    }

    const isAuthor = existingAgent.author.toString() === req.user.id;
    const hasEditPermission = existingAgent.isCollaborative || isAdmin || isAuthor;

    if (!hasEditPermission) {
      return res.status(403).json({
        error: 'You do not have permission to modify this non-collaborative agent',
      });
    }

    const buffer = await fs.readFile(req.file.path);

    const fileStrategy = req.app.locals.fileStrategy;

@@ -402,14 +429,7 @@ const uploadAgentAvatarHandler = async (req, res) => {
      source: fileStrategy,
    };

    let _avatar;
    try {
      const agent = await getAgent({ id: agent_id });
      _avatar = agent.avatar;
    } catch (error) {
      logger.error('[/:agent_id/avatar] Error fetching agent', error);
      _avatar = {};
    }
    let _avatar = existingAgent.avatar;

    if (_avatar && _avatar.source) {
      const { deleteFile } = getStrategyFunctions(_avatar.source);

@@ -431,7 +451,7 @@ const uploadAgentAvatarHandler = async (req, res) => {
    };

    promises.push(
      await updateAgent({ id: agent_id, author: req.user.id }, data, {
      await updateAgent({ id: agent_id }, data, {
        updatingUserId: req.user.id,
      }),
    );
659  api/server/controllers/agents/v1.spec.js  Normal file
@@ -0,0 +1,659 @@
const mongoose = require('mongoose');
const { v4: uuidv4 } = require('uuid');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { agentSchema } = require('@librechat/data-schemas');

// Only mock the dependencies that are not database-related
jest.mock('~/server/services/Config', () => ({
  getCachedTools: jest.fn().mockResolvedValue({
    web_search: true,
    execute_code: true,
    file_search: true,
  }),
}));

jest.mock('~/models/Project', () => ({
  getProjectByName: jest.fn().mockResolvedValue(null),
}));

jest.mock('~/server/services/Files/strategies', () => ({
  getStrategyFunctions: jest.fn(),
}));

jest.mock('~/server/services/Files/images/avatar', () => ({
  resizeAvatar: jest.fn(),
}));

jest.mock('~/server/services/Files/S3/crud', () => ({
  refreshS3Url: jest.fn(),
}));

jest.mock('~/server/services/Files/process', () => ({
  filterFile: jest.fn(),
}));

jest.mock('~/models/Action', () => ({
  updateAction: jest.fn(),
  getActions: jest.fn().mockResolvedValue([]),
}));

jest.mock('~/models/File', () => ({
  deleteFileByFilter: jest.fn(),
}));

const { createAgent: createAgentHandler, updateAgent: updateAgentHandler } = require('./v1');

/**
 * @type {import('mongoose').Model<import('@librechat/data-schemas').IAgent>}
 */
let Agent;

describe('Agent Controllers - Mass Assignment Protection', () => {
  let mongoServer;
  let mockReq;
  let mockRes;

  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
    const mongoUri = mongoServer.getUri();
    await mongoose.connect(mongoUri);
    Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema);
  }, 20000);

  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
  });

  beforeEach(async () => {
    await Agent.deleteMany({});

    // Reset all mocks
    jest.clearAllMocks();

    // Setup mock request and response objects
    mockReq = {
      user: {
        id: new mongoose.Types.ObjectId().toString(),
        role: 'USER',
      },
      body: {},
      params: {},
      app: {
        locals: {
          fileStrategy: 'local',
        },
      },
    };

    mockRes = {
      status: jest.fn().mockReturnThis(),
      json: jest.fn().mockReturnThis(),
    };
  });

  describe('createAgentHandler', () => {
    test('should create agent with allowed fields only', async () => {
      const validData = {
        name: 'Test Agent',
        description: 'A test agent',
        instructions: 'Be helpful',
        provider: 'openai',
        model: 'gpt-4',
        tools: ['web_search'],
        model_parameters: { temperature: 0.7 },
        tool_resources: {
          file_search: { file_ids: ['file1', 'file2'] },
        },
      };

      mockReq.body = validData;

      await createAgentHandler(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(201);
      expect(mockRes.json).toHaveBeenCalled();

      const createdAgent = mockRes.json.mock.calls[0][0];
      expect(createdAgent.name).toBe('Test Agent');
      expect(createdAgent.description).toBe('A test agent');
      expect(createdAgent.provider).toBe('openai');
      expect(createdAgent.model).toBe('gpt-4');
      expect(createdAgent.author.toString()).toBe(mockReq.user.id);
      expect(createdAgent.tools).toContain('web_search');

      // Verify in database
      const agentInDb = await Agent.findOne({ id: createdAgent.id });
      expect(agentInDb).toBeDefined();
      expect(agentInDb.name).toBe('Test Agent');
      expect(agentInDb.author.toString()).toBe(mockReq.user.id);
    });

    test('should reject creation with unauthorized fields (mass assignment protection)', async () => {
      const maliciousData = {
        // Required fields
        provider: 'openai',
        model: 'gpt-4',
        name: 'Malicious Agent',

        // Unauthorized fields that should be stripped
        author: new mongoose.Types.ObjectId().toString(), // Should not be able to set author
        authorName: 'Hacker', // Should be stripped
        isCollaborative: true, // Should be stripped on creation
        versions: [], // Should be stripped
        _id: new mongoose.Types.ObjectId(), // Should be stripped
        id: 'custom_agent_id', // Should be overridden
        createdAt: new Date('2020-01-01'), // Should be stripped
        updatedAt: new Date('2020-01-01'), // Should be stripped
      };

      mockReq.body = maliciousData;

      await createAgentHandler(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(201);

      const createdAgent = mockRes.json.mock.calls[0][0];

      // Verify unauthorized fields were not set
      expect(createdAgent.author.toString()).toBe(mockReq.user.id); // Should be the request user, not the malicious value
      expect(createdAgent.authorName).toBeUndefined();
      expect(createdAgent.isCollaborative).toBeFalsy();
      expect(createdAgent.versions).toHaveLength(1); // Should have exactly 1 version from creation
      expect(createdAgent.id).not.toBe('custom_agent_id'); // Should have generated ID
      expect(createdAgent.id).toMatch(/^agent_/); // Should have proper prefix

      // Verify timestamps are recent (not the malicious dates)
      const createdTime = new Date(createdAgent.createdAt).getTime();
      const now = Date.now();
      expect(now - createdTime).toBeLessThan(5000); // Created within last 5 seconds

      // Verify in database
      const agentInDb = await Agent.findOne({ id: createdAgent.id });
      expect(agentInDb.author.toString()).toBe(mockReq.user.id);
      expect(agentInDb.authorName).toBeUndefined();
    });

    test('should validate required fields', async () => {
      const invalidData = {
        name: 'Missing Required Fields',
        // Missing provider and model
      };

      mockReq.body = invalidData;

      await createAgentHandler(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(400);
      expect(mockRes.json).toHaveBeenCalledWith(
        expect.objectContaining({
          error: 'Invalid request data',
          details: expect.any(Array),
        }),
      );

      // Verify nothing was created in database
      const count = await Agent.countDocuments();
      expect(count).toBe(0);
    });

    test('should handle tool_resources validation', async () => {
      const dataWithInvalidToolResources = {
        provider: 'openai',
        model: 'gpt-4',
        name: 'Agent with Tool Resources',
        tool_resources: {
          // Valid resources
          file_search: {
            file_ids: ['file1', 'file2'],
            vector_store_ids: ['vs1'],
          },
          execute_code: {
            file_ids: ['file3'],
          },
          // Invalid resource (should be stripped by schema)
          invalid_resource: {
            file_ids: ['file4'],
          },
        },
      };

      mockReq.body = dataWithInvalidToolResources;

      await createAgentHandler(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(201);

      const createdAgent = mockRes.json.mock.calls[0][0];
      expect(createdAgent.tool_resources).toBeDefined();
      expect(createdAgent.tool_resources.file_search).toBeDefined();
      expect(createdAgent.tool_resources.execute_code).toBeDefined();
      expect(createdAgent.tool_resources.invalid_resource).toBeUndefined(); // Should be stripped

      // Verify in database
      const agentInDb = await Agent.findOne({ id: createdAgent.id });
      expect(agentInDb.tool_resources.invalid_resource).toBeUndefined();
    });

    test('should handle avatar validation', async () => {
      const dataWithAvatar = {
        provider: 'openai',
        model: 'gpt-4',
        name: 'Agent with Avatar',
        avatar: {
          filepath: 'https://example.com/avatar.png',
          source: 's3',
        },
      };

      mockReq.body = dataWithAvatar;

      await createAgentHandler(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(201);

      const createdAgent = mockRes.json.mock.calls[0][0];
      expect(createdAgent.avatar).toEqual({
        filepath: 'https://example.com/avatar.png',
        source: 's3',
      });
    });

    test('should handle invalid avatar format', async () => {
      const dataWithInvalidAvatar = {
        provider: 'openai',
        model: 'gpt-4',
        name: 'Agent with Invalid Avatar',
        avatar: 'just-a-string', // Invalid format
      };

      mockReq.body = dataWithInvalidAvatar;

      await createAgentHandler(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(400);
      expect(mockRes.json).toHaveBeenCalledWith(
        expect.objectContaining({
          error: 'Invalid request data',
        }),
      );
    });
  });

  describe('updateAgentHandler', () => {
    let existingAgentId;
    let existingAgentAuthorId;

    beforeEach(async () => {
      // Create an existing agent for update tests
      existingAgentAuthorId = new mongoose.Types.ObjectId();
      const agent = await Agent.create({
        id: `agent_${uuidv4()}`,
        name: 'Original Agent',
        provider: 'openai',
        model: 'gpt-3.5-turbo',
        author: existingAgentAuthorId,
        description: 'Original description',
        isCollaborative: false,
        versions: [
          {
            name: 'Original Agent',
            provider: 'openai',
            model: 'gpt-3.5-turbo',
            description: 'Original description',
            createdAt: new Date(),
            updatedAt: new Date(),
          },
        ],
      });
      existingAgentId = agent.id;
    });

    test('should update agent with allowed fields only', async () => {
      mockReq.user.id = existingAgentAuthorId.toString(); // Set as author
      mockReq.params.id = existingAgentId;
      mockReq.body = {
        name: 'Updated Agent',
        description: 'Updated description',
        model: 'gpt-4',
        isCollaborative: true, // This IS allowed in updates
      };

      await updateAgentHandler(mockReq, mockRes);

      expect(mockRes.status).not.toHaveBeenCalledWith(400);
      expect(mockRes.status).not.toHaveBeenCalledWith(403);
      expect(mockRes.json).toHaveBeenCalled();

      const updatedAgent = mockRes.json.mock.calls[0][0];
      expect(updatedAgent.name).toBe('Updated Agent');
      expect(updatedAgent.description).toBe('Updated description');
      expect(updatedAgent.model).toBe('gpt-4');
      expect(updatedAgent.isCollaborative).toBe(true);
      expect(updatedAgent.author).toBe(existingAgentAuthorId.toString());

      // Verify in database
      const agentInDb = await Agent.findOne({ id: existingAgentId });
      expect(agentInDb.name).toBe('Updated Agent');
      expect(agentInDb.isCollaborative).toBe(true);
    });

    test('should reject update with unauthorized fields (mass assignment protection)', async () => {
      mockReq.user.id = existingAgentAuthorId.toString();
      mockReq.params.id = existingAgentId;
      mockReq.body = {
        name: 'Updated Name',

        // Unauthorized fields that should be stripped
        author: new mongoose.Types.ObjectId().toString(), // Should not be able to change author
        authorName: 'Hacker', // Should be stripped
        id: 'different_agent_id', // Should be stripped
        _id: new mongoose.Types.ObjectId(), // Should be stripped
        versions: [], // Should be stripped
        createdAt: new Date('2020-01-01'), // Should be stripped
        updatedAt: new Date('2020-01-01'), // Should be stripped
      };

      await updateAgentHandler(mockReq, mockRes);

      expect(mockRes.json).toHaveBeenCalled();

      const updatedAgent = mockRes.json.mock.calls[0][0];

      // Verify unauthorized fields were not changed
      expect(updatedAgent.author).toBe(existingAgentAuthorId.toString()); // Should not have changed
      expect(updatedAgent.authorName).toBeUndefined();
      expect(updatedAgent.id).toBe(existingAgentId); // Should not have changed
      expect(updatedAgent.name).toBe('Updated Name'); // Only this should have changed

      // Verify in database
      const agentInDb = await Agent.findOne({ id: existingAgentId });
      expect(agentInDb.author.toString()).toBe(existingAgentAuthorId.toString());
      expect(agentInDb.id).toBe(existingAgentId);
    });

    test('should reject update from non-author when not collaborative', async () => {
      const differentUserId = new mongoose.Types.ObjectId().toString();
      mockReq.user.id = differentUserId; // Different user
      mockReq.params.id = existingAgentId;
      mockReq.body = {
        name: 'Unauthorized Update',
      };

      await updateAgentHandler(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(403);
      expect(mockRes.json).toHaveBeenCalledWith({
        error: 'You do not have permission to modify this non-collaborative agent',
      });

      // Verify agent was not modified in database
      const agentInDb = await Agent.findOne({ id: existingAgentId });
      expect(agentInDb.name).toBe('Original Agent');
    });

    test('should allow update from non-author when collaborative', async () => {
      // First make the agent collaborative
      await Agent.updateOne({ id: existingAgentId }, { isCollaborative: true });

      const differentUserId = new mongoose.Types.ObjectId().toString();
      mockReq.user.id = differentUserId; // Different user
      mockReq.params.id = existingAgentId;
      mockReq.body = {
        name: 'Collaborative Update',
      };

      await updateAgentHandler(mockReq, mockRes);

      expect(mockRes.status).not.toHaveBeenCalledWith(403);
      expect(mockRes.json).toHaveBeenCalled();

      const updatedAgent = mockRes.json.mock.calls[0][0];
      expect(updatedAgent.name).toBe('Collaborative Update');
      // Author field should be removed for non-author
      expect(updatedAgent.author).toBeUndefined();

      // Verify in database
      const agentInDb = await Agent.findOne({ id: existingAgentId });
      expect(agentInDb.name).toBe('Collaborative Update');
    });

    test('should allow admin to update any agent', async () => {
      const adminUserId = new mongoose.Types.ObjectId().toString();
      mockReq.user.id = adminUserId;
      mockReq.user.role = 'ADMIN'; // Set as admin
      mockReq.params.id = existingAgentId;
      mockReq.body = {
        name: 'Admin Update',
      };

      await updateAgentHandler(mockReq, mockRes);

      expect(mockRes.status).not.toHaveBeenCalledWith(403);
      expect(mockRes.json).toHaveBeenCalled();

      const updatedAgent = mockRes.json.mock.calls[0][0];
      expect(updatedAgent.name).toBe('Admin Update');
    });

    test('should handle projectIds updates', async () => {
      mockReq.user.id = existingAgentAuthorId.toString();
      mockReq.params.id = existingAgentId;

      const projectId1 = new mongoose.Types.ObjectId().toString();
      const projectId2 = new mongoose.Types.ObjectId().toString();

      mockReq.body = {
        projectIds: [projectId1, projectId2],
      };

      await updateAgentHandler(mockReq, mockRes);

      expect(mockRes.json).toHaveBeenCalled();

      const updatedAgent = mockRes.json.mock.calls[0][0];
      expect(updatedAgent).toBeDefined();
      // Note: updateAgentProjects requires more setup, so we just verify the handler doesn't crash
    });

    test('should validate tool_resources in updates', async () => {
      mockReq.user.id = existingAgentAuthorId.toString();
      mockReq.params.id = existingAgentId;
      mockReq.body = {
        tool_resources: {
          ocr: {
            file_ids: ['ocr1', 'ocr2'],
          },
          execute_code: {
            file_ids: ['img1'],
          },
          // Invalid tool resource
          invalid_tool: {
            file_ids: ['invalid'],
          },
        },
      };

      await updateAgentHandler(mockReq, mockRes);

      expect(mockRes.json).toHaveBeenCalled();

      const updatedAgent = mockRes.json.mock.calls[0][0];
      expect(updatedAgent.tool_resources).toBeDefined();
      expect(updatedAgent.tool_resources.ocr).toBeDefined();
      expect(updatedAgent.tool_resources.execute_code).toBeDefined();
      expect(updatedAgent.tool_resources.invalid_tool).toBeUndefined();
    });

    test('should return 404 for non-existent agent', async () => {
      mockReq.user.id = existingAgentAuthorId.toString();
      mockReq.params.id = `agent_${uuidv4()}`; // Non-existent ID
      mockReq.body = {
        name: 'Update Non-existent',
      };

      await updateAgentHandler(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(404);
      expect(mockRes.json).toHaveBeenCalledWith({ error: 'Agent not found' });
    });

    test('should handle validation errors properly', async () => {
      mockReq.user.id = existingAgentAuthorId.toString();
      mockReq.params.id = existingAgentId;
      mockReq.body = {
        model_parameters: 'invalid-not-an-object', // Should be an object
      };

      await updateAgentHandler(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(400);
      expect(mockRes.json).toHaveBeenCalledWith(
        expect.objectContaining({
          error: 'Invalid request data',
          details: expect.any(Array),
        }),
      );
    });
  });

  describe('Mass Assignment Attack Scenarios', () => {
    test('should prevent setting system fields during creation', async () => {
      const systemFields = {
        provider: 'openai',
        model: 'gpt-4',
        name: 'System Fields Test',

        // System fields that should never be settable by users
        __v: 99,
        _id: new mongoose.Types.ObjectId(),
        versions: [
          {
            name: 'Fake Version',
            provider: 'fake',
            model: 'fake-model',
          },
        ],
      };

      mockReq.body = systemFields;

      await createAgentHandler(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(201);

      const createdAgent = mockRes.json.mock.calls[0][0];

      // Verify system fields were not affected
      expect(createdAgent.__v).not.toBe(99);
      expect(createdAgent.versions).toHaveLength(1); // Should only have the auto-created version
      expect(createdAgent.versions[0].name).toBe('System Fields Test'); // From actual creation
      expect(createdAgent.versions[0].provider).toBe('openai'); // From actual creation

      // Verify in database
      const agentInDb = await Agent.findOne({ id: createdAgent.id });
      expect(agentInDb.__v).not.toBe(99);
    });

    test('should prevent privilege escalation through isCollaborative', async () => {
      // Create a non-collaborative agent
      const authorId = new mongoose.Types.ObjectId();
      const agent = await Agent.create({
        id: `agent_${uuidv4()}`,
        name: 'Private Agent',
        provider: 'openai',
        model: 'gpt-4',
        author: authorId,
        isCollaborative: false,
        versions: [
          {
            name: 'Private Agent',
            provider: 'openai',
            model: 'gpt-4',
            createdAt: new Date(),
            updatedAt: new Date(),
          },
        ],
      });

      // Try to make it collaborative as a different user
      const attackerId = new mongoose.Types.ObjectId().toString();
      mockReq.user.id = attackerId;
      mockReq.params.id = agent.id;
      mockReq.body = {
        isCollaborative: true, // Trying to escalate privileges
      };

      await updateAgentHandler(mockReq, mockRes);

      // Should be rejected
      expect(mockRes.status).toHaveBeenCalledWith(403);

      // Verify in database that it's still not collaborative
      const agentInDb = await Agent.findOne({ id: agent.id });
      expect(agentInDb.isCollaborative).toBe(false);
    });

    test('should prevent author hijacking', async () => {
      const originalAuthorId = new mongoose.Types.ObjectId();
      const attackerId = new mongoose.Types.ObjectId();

      // Admin creates an agent
      mockReq.user.id = originalAuthorId.toString();
      mockReq.user.role = 'ADMIN';
      mockReq.body = {
        provider: 'openai',
        model: 'gpt-4',
        name: 'Admin Agent',
        author: attackerId.toString(), // Trying to set different author
      };

      await createAgentHandler(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(201);

      const createdAgent = mockRes.json.mock.calls[0][0];

      // Author should be the actual user, not the attempted value
      expect(createdAgent.author.toString()).toBe(originalAuthorId.toString());
      expect(createdAgent.author.toString()).not.toBe(attackerId.toString());

      // Verify in database
      const agentInDb = await Agent.findOne({ id: createdAgent.id });
      expect(agentInDb.author.toString()).toBe(originalAuthorId.toString());
    });

    test('should strip unknown fields to prevent future vulnerabilities', async () => {
      mockReq.body = {
        provider: 'openai',
        model: 'gpt-4',
        name: 'Future Proof Test',

        // Unknown fields that might be added in future
        superAdminAccess: true,
        bypassAllChecks: true,
        internalFlag: 'secret',
        futureFeature: 'exploit',
      };

      await createAgentHandler(mockReq, mockRes);

      expect(mockRes.status).toHaveBeenCalledWith(201);

      const createdAgent = mockRes.json.mock.calls[0][0];

      // Verify unknown fields were stripped
      expect(createdAgent.superAdminAccess).toBeUndefined();
      expect(createdAgent.bypassAllChecks).toBeUndefined();
      expect(createdAgent.internalFlag).toBeUndefined();
      expect(createdAgent.futureFeature).toBeUndefined();

      // Also check in database
      const agentInDb = await Agent.findOne({ id: createdAgent.id }).lean();
      expect(agentInDb.superAdminAccess).toBeUndefined();
      expect(agentInDb.bypassAllChecks).toBeUndefined();
      expect(agentInDb.internalFlag).toBeUndefined();
      expect(agentInDb.futureFeature).toBeUndefined();
    });
  });
});
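
This suite spins up an in-memory MongoDB via mongodb-memory-server, so it needs no external database. Assuming jest is configured at the repository root, it can be run in isolation with:

npx jest api/server/controllers/agents/v1.spec.js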
@@ -1,4 +1,7 @@
const { v4 } = require('uuid');
const { sleep } = require('@librechat/agents');
const { sendEvent } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const {
  Time,
  Constants,

@@ -19,20 +22,20 @@ const {
  addThreadMetadata,
  saveAssistantMessage,
} = require('~/server/services/Threads');
const { sendResponse, sendMessage, sleep, countTokens } = require('~/server/utils');
const { runAssistant, createOnTextProgress } = require('~/server/services/AssistantService');
const validateAuthor = require('~/server/middleware/assistants/validateAuthor');
const { formatMessage, createVisionPrompt } = require('~/app/clients/prompts');
const { createRun, StreamRunManager } = require('~/server/services/Runs');
const { addTitle } = require('~/server/services/Endpoints/assistants');
const { createRunBody } = require('~/server/services/createRunBody');
const { sendResponse } = require('~/server/middleware/error');
const { getTransactions } = require('~/models/Transaction');
const { checkBalance } = require('~/models/balanceMethods');
const { getConvo } = require('~/models/Conversation');
const getLogStores = require('~/cache/getLogStores');
const { countTokens } = require('~/server/utils');
const { getModelMaxTokens } = require('~/utils');
const { getOpenAIClient } = require('./helpers');
const { logger } = require('~/config');

/**
 * @route POST /

@@ -471,7 +474,7 @@ const chatV1 = async (req, res) => {
    await Promise.all(promises);

    const sendInitialResponse = () => {
      sendMessage(res, {
      sendEvent(res, {
        sync: true,
        conversationId,
        // messages: previousMessages,

@@ -587,7 +590,7 @@ const chatV1 = async (req, res) => {
      iconURL: endpointOption.iconURL,
    };

    sendMessage(res, {
    sendEvent(res, {
      final: true,
      conversation,
      requestMessage: {
@@ -1,4 +1,7 @@
const { v4 } = require('uuid');
const { sleep } = require('@librechat/agents');
const { sendEvent } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const {
  Time,
  Constants,

@@ -22,15 +25,14 @@ const { createErrorHandler } = require('~/server/controllers/assistants/errors')
const validateAuthor = require('~/server/middleware/assistants/validateAuthor');
const { createRun, StreamRunManager } = require('~/server/services/Runs');
const { addTitle } = require('~/server/services/Endpoints/assistants');
const { sendMessage, sleep, countTokens } = require('~/server/utils');
const { createRunBody } = require('~/server/services/createRunBody');
const { getTransactions } = require('~/models/Transaction');
const { checkBalance } = require('~/models/balanceMethods');
const { getConvo } = require('~/models/Conversation');
const getLogStores = require('~/cache/getLogStores');
const { countTokens } = require('~/server/utils');
const { getModelMaxTokens } = require('~/utils');
const { getOpenAIClient } = require('./helpers');
const { logger } = require('~/config');

/**
 * @route POST /

@@ -309,7 +311,7 @@ const chatV2 = async (req, res) => {
    await Promise.all(promises);

    const sendInitialResponse = () => {
      sendMessage(res, {
      sendEvent(res, {
        sync: true,
        conversationId,
        // messages: previousMessages,

@@ -432,7 +434,7 @@ const chatV2 = async (req, res) => {
      iconURL: endpointOption.iconURL,
    };

    sendMessage(res, {
    sendEvent(res, {
      final: true,
      conversation,
      requestMessage: {
@@ -1,10 +1,10 @@
// errorHandler.js
const { sendResponse } = require('~/server/utils');
const { logger } = require('~/config');
const getLogStores = require('~/cache/getLogStores');
const { logger } = require('@librechat/data-schemas');
const { CacheKeys, ViolationTypes, ContentTypes } = require('librechat-data-provider');
const { getConvo } = require('~/models/Conversation');
const { recordUsage, checkMessageGaps } = require('~/server/services/Threads');
const { sendResponse } = require('~/server/middleware/error');
const { getConvo } = require('~/models/Conversation');
const getLogStores = require('~/cache/getLogStores');

/**
 * @typedef {Object} ErrorHandlerContext

@@ -78,7 +78,7 @@ const createErrorHandler = ({ req, res, getContext, originPath = '/assistants/ch
    } else if (/Files.*are invalid/.test(error.message)) {
      const errorMessage = `Files are invalid, or may not have uploaded yet.${
        endpoint === 'azureAssistants'
          ? ' If using Azure OpenAI, files are only available in the region of the assistant\'s model at the time of upload.'
          ? " If using Azure OpenAI, files are only available in the region of the assistant's model at the time of upload."
          : ''
      }`;
      return sendResponse(req, res, messageData, errorMessage);
@@ -1,5 +1,7 @@
const { nanoid } = require('nanoid');
const { EnvVar } = require('@librechat/agents');
const { checkAccess } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const {
  Tools,
  AuthType,

@@ -13,9 +15,8 @@ const { processCodeOutput } = require('~/server/services/Files/Code/process');
const { createToolCall, getToolCallsByConvo } = require('~/models/ToolCall');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { loadTools } = require('~/app/clients/tools/util');
const { checkAccess } = require('~/server/middleware');
const { getRoleByName } = require('~/models/Role');
const { getMessage } = require('~/models/Message');
const { logger } = require('~/config');

const fieldsMap = {
  [Tools.execute_code]: [EnvVar.CODE_API_KEY],

@@ -79,6 +80,7 @@ const verifyToolAuth = async (req, res) => {
      throwError: false,
    });
  } catch (error) {
    logger.error('Error loading auth values', error);
    res.status(200).json({ authenticated: false, message: AuthType.USER_PROVIDED });
    return;
  }

@@ -132,7 +134,12 @@ const callTool = async (req, res) => {
  logger.debug(`[${toolId}/call] User: ${req.user.id}`);
  let hasAccess = true;
  if (toolAccessPermType[toolId]) {
    hasAccess = await checkAccess(req.user, toolAccessPermType[toolId], [Permissions.USE]);
    hasAccess = await checkAccess({
      user: req.user,
      permissionType: toolAccessPermType[toolId],
      permissions: [Permissions.USE],
      getRoleByName,
    });
  }
  if (!hasAccess) {
    logger.warn(
@@ -55,7 +55,6 @@ const startServer = async () => {

  /* Middleware */
  app.use(noIndex);
  app.use(errorController);
  app.use(express.json({ limit: '3mb' }));
  app.use(express.urlencoded({ extended: true, limit: '3mb' }));
  app.use(mongoSanitize());

@@ -121,6 +120,9 @@ const startServer = async () => {
  app.use('/api/tags', routes.tags);
  app.use('/api/mcp', routes.mcp);

  // Add the error controller one more time after all routes
  app.use(errorController);

  app.use((req, res) => {
    res.set({
      'Cache-Control': process.env.INDEX_CACHE_CONTROL || 'no-cache, no-store, must-revalidate',
@@ -1,5 +1,4 @@
const fs = require('fs');
const path = require('path');
const request = require('supertest');
const { MongoMemoryServer } = require('mongodb-memory-server');
const mongoose = require('mongoose');

@@ -59,6 +58,30 @@ describe('Server Configuration', () => {
    expect(response.headers['pragma']).toBe('no-cache');
    expect(response.headers['expires']).toBe('0');
  });

  it('should return 500 for unknown errors via ErrorController', async () => {
    // Testing the error handling here on top of unit tests to ensure the middleware is correctly integrated

    // Mock MongoDB operations to fail
    const originalFindOne = mongoose.models.User.findOne;
    const mockError = new Error('MongoDB operation failed');
    mongoose.models.User.findOne = jest.fn().mockImplementation(() => {
      throw mockError;
    });

    try {
      const response = await request(app).post('/api/auth/login').send({
        email: 'test@example.com',
        password: 'password123',
      });

      expect(response.status).toBe(500);
      expect(response.text).toBe('An unknown error occurred.');
    } finally {
      // Restore original function
      mongoose.models.User.findOne = originalFindOne;
    }
  });
});

// Polls the /health endpoint every 30ms for up to 10 seconds to wait for the server to start completely
@@ -1,13 +1,13 @@
// abortMiddleware.js
const { logger } = require('@librechat/data-schemas');
const { countTokens, isEnabled, sendEvent } = require('@librechat/api');
const { isAssistantsEndpoint, ErrorTypes } = require('librechat-data-provider');
const { sendMessage, sendError, countTokens, isEnabled } = require('~/server/utils');
const { truncateText, smartTruncateText } = require('~/app/clients/prompts');
const clearPendingReq = require('~/cache/clearPendingReq');
const { sendError } = require('~/server/middleware/error');
const { spendTokens } = require('~/models/spendTokens');
const abortControllers = require('./abortControllers');
const { saveMessage, getConvo } = require('~/models');
const { abortRun } = require('./abortRun');
const { logger } = require('~/config');

const abortDataMap = new WeakMap();

@@ -101,7 +101,7 @@ async function abortMessage(req, res) {
  cleanupAbortController(abortKey);

  if (res.headersSent && finalEvent) {
    return sendMessage(res, finalEvent);
    return sendEvent(res, finalEvent);
  }

  res.setHeader('Content-Type', 'application/json');

@@ -174,7 +174,7 @@ const createAbortController = (req, res, getAbortData, getReqData) => {
   * @param {string} responseMessageId
   */
  const onStart = (userMessage, responseMessageId) => {
    sendMessage(res, { message: userMessage, created: true });
    sendEvent(res, { message: userMessage, created: true });

    const abortKey = userMessage?.conversationId ?? req.user.id;
    getReqData({ abortKey });
@@ -1,11 +1,11 @@
const { sendEvent } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { CacheKeys, RunStatus, isUUID } = require('librechat-data-provider');
const { initializeClient } = require('~/server/services/Endpoints/assistants');
const { checkMessageGaps, recordUsage } = require('~/server/services/Threads');
const { deleteMessages } = require('~/models/Message');
const { getConvo } = require('~/models/Conversation');
const getLogStores = require('~/cache/getLogStores');
const { sendMessage } = require('~/server/utils');
const { logger } = require('~/config');

const three_minutes = 1000 * 60 * 3;

@@ -34,7 +34,7 @@ async function abortRun(req, res) {
  const [thread_id, run_id] = runValues.split(':');

  if (!run_id) {
    logger.warn('[abortRun] Couldn\'t find run for cancel request', { thread_id });
    logger.warn("[abortRun] Couldn't find run for cancel request", { thread_id });
    return res.status(204).send({ message: 'Run not found' });
  } else if (run_id === 'cancelled') {
    logger.warn('[abortRun] Run already cancelled', { thread_id });

@@ -93,7 +93,7 @@ async function abortRun(req, res) {
  };

  if (res.headersSent && finalEvent) {
    return sendMessage(res, finalEvent);
    return sendEvent(res, finalEvent);
  }

  res.json(finalEvent);
@@ -1,3 +1,4 @@
const { handleError } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const {
  EndpointURLs,

@@ -14,7 +15,6 @@ const openAI = require('~/server/services/Endpoints/openAI');
const agents = require('~/server/services/Endpoints/agents');
const custom = require('~/server/services/Endpoints/custom');
const google = require('~/server/services/Endpoints/google');
const { handleError } = require('~/server/utils');

const buildFunction = {
  [EModelEndpoint.openAI]: openAI.buildOptions,
@@ -18,7 +18,6 @@ const message = 'Your account has been temporarily banned due to violations of o
 * @function
 * @param {Object} req - Express Request object.
 * @param {Object} res - Express Response object.
 * @param {String} errorMessage - Error message to be displayed in case of /api/ask or /api/edit request.
 *
 * @returns {Promise<Object>} - Returns a Promise which when resolved sends a response status of 403 with a specific message if request is not of api/ask or api/edit types. If it is, calls `denyRequest()` function.
 */

@@ -135,6 +134,7 @@ const checkBan = async (req, res, next = () => {}) => {
    return await banResponse(req, res);
  } catch (error) {
    logger.error('Error in checkBan middleware:', error);
    return next(error);
  }
};
@@ -1,6 +1,7 @@
const crypto = require('crypto');
const { sendEvent } = require('@librechat/api');
const { getResponseSender, Constants } = require('librechat-data-provider');
const { sendMessage, sendError } = require('~/server/utils');
const { sendError } = require('~/server/middleware/error');
const { saveMessage } = require('~/models');

/**

@@ -36,7 +37,7 @@ const denyRequest = async (req, res, errorMessage) => {
    isCreatedByUser: true,
    text,
  };
  sendMessage(res, { message: userMessage, created: true });
  sendEvent(res, { message: userMessage, created: true });

  const shouldSaveMessage = _convoId && parentMessageId && parentMessageId !== Constants.NO_PARENT;
@@ -1,31 +1,9 @@
const crypto = require('crypto');
const { logger } = require('@librechat/data-schemas');
const { parseConvo } = require('librechat-data-provider');
const { sendEvent, handleError } = require('@librechat/api');
const { saveMessage, getMessages } = require('~/models/Message');
const { getConvo } = require('~/models/Conversation');
const { logger } = require('~/config');

/**
 * Sends error data in Server Sent Events format and ends the response.
 * @param {object} res - The server response.
 * @param {string} message - The error message.
 */
const handleError = (res, message) => {
  res.write(`event: error\ndata: ${JSON.stringify(message)}\n\n`);
  res.end();
};

/**
 * Sends message data in Server Sent Events format.
 * @param {Express.Response} res - The server response.
 * @param {string | Object} message - The message to be sent.
 * @param {'message' | 'error' | 'cancel'} event - [Optional] The type of event. Default is 'message'.
 */
const sendMessage = (res, message, event = 'message') => {
  if (typeof message === 'string' && message.length === 0) {
    return;
  }
  res.write(`event: ${event}\ndata: ${JSON.stringify(message)}\n\n`);
};

/**
 * Processes an error with provided options, saves the error message and sends a corresponding SSE response

@@ -91,7 +69,7 @@ const sendError = async (req, res, options, callback) => {
    convo = parseConvo(errorMessage);
  }

  return sendMessage(res, {
  return sendEvent(res, {
    final: true,
    requestMessage: query?.[0] ? query[0] : requestMessage,
    responseMessage: errorMessage,

@@ -120,12 +98,10 @@ const sendResponse = (req, res, data, errorMessage) => {
  if (errorMessage) {
    return sendError(req, res, { ...data, text: errorMessage });
  }
  return sendMessage(res, data);
  return sendEvent(res, data);
};

module.exports = {
  sendResponse,
  handleError,
  sendMessage,
  sendError,
  sendResponse,
};
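
For reference, the helpers removed above frame their payloads as Server-Sent Events, so a call like `sendMessage(res, { final: true })` writes the following frame to the response stream (a blank line terminates each event):

event: message
data: {"final":true}
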
95  api/server/middleware/limiters/forkLimiters.js  Normal file
@@ -0,0 +1,95 @@
const rateLimit = require('express-rate-limit');
const { isEnabled } = require('@librechat/api');
const { RedisStore } = require('rate-limit-redis');
const { logger } = require('@librechat/data-schemas');
const { ViolationTypes } = require('librechat-data-provider');
const ioredisClient = require('~/cache/ioredisClient');
const logViolation = require('~/cache/logViolation');

const getEnvironmentVariables = () => {
  const FORK_IP_MAX = parseInt(process.env.FORK_IP_MAX) || 30;
  const FORK_IP_WINDOW = parseInt(process.env.FORK_IP_WINDOW) || 1;
  const FORK_USER_MAX = parseInt(process.env.FORK_USER_MAX) || 7;
  const FORK_USER_WINDOW = parseInt(process.env.FORK_USER_WINDOW) || 1;
  const FORK_VIOLATION_SCORE = process.env.FORK_VIOLATION_SCORE;

  const forkIpWindowMs = FORK_IP_WINDOW * 60 * 1000;
  const forkIpMax = FORK_IP_MAX;
  const forkIpWindowInMinutes = forkIpWindowMs / 60000;

  const forkUserWindowMs = FORK_USER_WINDOW * 60 * 1000;
  const forkUserMax = FORK_USER_MAX;
  const forkUserWindowInMinutes = forkUserWindowMs / 60000;

  return {
    forkIpWindowMs,
    forkIpMax,
    forkIpWindowInMinutes,
    forkUserWindowMs,
    forkUserMax,
    forkUserWindowInMinutes,
    forkViolationScore: FORK_VIOLATION_SCORE,
  };
};

const createForkHandler = (ip = true) => {
  const {
    forkIpMax,
    forkUserMax,
    forkViolationScore,
    forkIpWindowInMinutes,
    forkUserWindowInMinutes,
  } = getEnvironmentVariables();

  return async (req, res) => {
    const type = ViolationTypes.FILE_UPLOAD_LIMIT;
    const errorMessage = {
      type,
      max: ip ? forkIpMax : forkUserMax,
      limiter: ip ? 'ip' : 'user',
      windowInMinutes: ip ? forkIpWindowInMinutes : forkUserWindowInMinutes,
    };

    await logViolation(req, res, type, errorMessage, forkViolationScore);
    res.status(429).json({ message: 'Too many conversation fork requests. Try again later' });
  };
};

const createForkLimiters = () => {
  const { forkIpWindowMs, forkIpMax, forkUserWindowMs, forkUserMax } = getEnvironmentVariables();

  const ipLimiterOptions = {
    windowMs: forkIpWindowMs,
    max: forkIpMax,
    handler: createForkHandler(),
  };
  const userLimiterOptions = {
    windowMs: forkUserWindowMs,
    max: forkUserMax,
    handler: createForkHandler(false),
    keyGenerator: function (req) {
      return req.user?.id;
    },
  };

  if (isEnabled(process.env.USE_REDIS) && ioredisClient) {
    logger.debug('Using Redis for fork rate limiters.');
    const sendCommand = (...args) => ioredisClient.call(...args);
    const ipStore = new RedisStore({
      sendCommand,
      prefix: 'fork_ip_limiter:',
    });
    const userStore = new RedisStore({
      sendCommand,
      prefix: 'fork_user_limiter:',
    });
    ipLimiterOptions.store = ipStore;
    userLimiterOptions.store = userStore;
  }

  const forkIpLimiter = rateLimit(ipLimiterOptions);
  const forkUserLimiter = rateLimit(userLimiterOptions);
  return { forkIpLimiter, forkUserLimiter };
};

module.exports = { createForkLimiters };
|
||||
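A minimal wiring sketch for the new limiters (the convos route diff further below applies them exactly this way; `forkHandler` here is a hypothetical placeholder):

const { createForkLimiters } = require('~/server/middleware');

const { forkIpLimiter, forkUserLimiter } = createForkLimiters();
// IP-keyed and user-keyed limits both run before the fork handler
router.post('/fork', forkIpLimiter, forkUserLimiter, forkHandler);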
@@ -1,16 +1,17 @@
const rateLimit = require('express-rate-limit');
const { isEnabled } = require('@librechat/api');
const { RedisStore } = require('rate-limit-redis');
const { logger } = require('@librechat/data-schemas');
const { ViolationTypes } = require('librechat-data-provider');
const ioredisClient = require('~/cache/ioredisClient');
const logViolation = require('~/cache/logViolation');
const { isEnabled } = require('~/server/utils');
const { logger } = require('~/config');

const getEnvironmentVariables = () => {
  const IMPORT_IP_MAX = parseInt(process.env.IMPORT_IP_MAX) || 100;
  const IMPORT_IP_WINDOW = parseInt(process.env.IMPORT_IP_WINDOW) || 15;
  const IMPORT_USER_MAX = parseInt(process.env.IMPORT_USER_MAX) || 50;
  const IMPORT_USER_WINDOW = parseInt(process.env.IMPORT_USER_WINDOW) || 15;
  const IMPORT_VIOLATION_SCORE = process.env.IMPORT_VIOLATION_SCORE;

  const importIpWindowMs = IMPORT_IP_WINDOW * 60 * 1000;
  const importIpMax = IMPORT_IP_MAX;
@@ -27,12 +28,18 @@ const getEnvironmentVariables = () => {
    importUserWindowMs,
    importUserMax,
    importUserWindowInMinutes,
    importViolationScore: IMPORT_VIOLATION_SCORE,
  };
};

const createImportHandler = (ip = true) => {
  const { importIpMax, importIpWindowInMinutes, importUserMax, importUserWindowInMinutes } =
    getEnvironmentVariables();
  const {
    importIpMax,
    importUserMax,
    importViolationScore,
    importIpWindowInMinutes,
    importUserWindowInMinutes,
  } = getEnvironmentVariables();

  return async (req, res) => {
    const type = ViolationTypes.FILE_UPLOAD_LIMIT;
@@ -43,7 +50,7 @@ const createImportHandler = (ip = true) => {
      windowInMinutes: ip ? importIpWindowInMinutes : importUserWindowInMinutes,
    };

    await logViolation(req, res, type, errorMessage);
    await logViolation(req, res, type, errorMessage, importViolationScore);
    res.status(429).json({ message: 'Too many conversation import requests. Try again later' });
  };
};
@@ -4,6 +4,7 @@ const createSTTLimiters = require('./sttLimiters');
const loginLimiter = require('./loginLimiter');
const importLimiters = require('./importLimiters');
const uploadLimiters = require('./uploadLimiters');
const forkLimiters = require('./forkLimiters');
const registerLimiter = require('./registerLimiter');
const toolCallLimiter = require('./toolCallLimiter');
const messageLimiters = require('./messageLimiters');
@@ -14,6 +15,7 @@ module.exports = {
  ...uploadLimiters,
  ...importLimiters,
  ...messageLimiters,
  ...forkLimiters,
  loginLimiter,
  registerLimiter,
  toolCallLimiter,
@@ -11,6 +11,7 @@ const {
  MESSAGE_IP_WINDOW = 1,
  MESSAGE_USER_MAX = 40,
  MESSAGE_USER_WINDOW = 1,
  MESSAGE_VIOLATION_SCORE: score,
} = process.env;

const ipWindowMs = MESSAGE_IP_WINDOW * 60 * 1000;
@@ -39,7 +40,7 @@ const createHandler = (ip = true) => {
      windowInMinutes: ip ? ipWindowInMinutes : userWindowInMinutes,
    };

    await logViolation(req, res, type, errorMessage);
    await logViolation(req, res, type, errorMessage, score);
    return await denyRequest(req, res, errorMessage);
  };
};
@@ -11,6 +11,7 @@ const getEnvironmentVariables = () => {
  const STT_IP_WINDOW = parseInt(process.env.STT_IP_WINDOW) || 1;
  const STT_USER_MAX = parseInt(process.env.STT_USER_MAX) || 50;
  const STT_USER_WINDOW = parseInt(process.env.STT_USER_WINDOW) || 1;
  const STT_VIOLATION_SCORE = process.env.STT_VIOLATION_SCORE;

  const sttIpWindowMs = STT_IP_WINDOW * 60 * 1000;
  const sttIpMax = STT_IP_MAX;
@@ -27,11 +28,12 @@ const getEnvironmentVariables = () => {
    sttUserWindowMs,
    sttUserMax,
    sttUserWindowInMinutes,
    sttViolationScore: STT_VIOLATION_SCORE,
  };
};

const createSTTHandler = (ip = true) => {
  const { sttIpMax, sttIpWindowInMinutes, sttUserMax, sttUserWindowInMinutes } =
  const { sttIpMax, sttIpWindowInMinutes, sttUserMax, sttUserWindowInMinutes, sttViolationScore } =
    getEnvironmentVariables();

  return async (req, res) => {
@@ -43,7 +45,7 @@ const createSTTHandler = (ip = true) => {
      windowInMinutes: ip ? sttIpWindowInMinutes : sttUserWindowInMinutes,
    };

    await logViolation(req, res, type, errorMessage);
    await logViolation(req, res, type, errorMessage, sttViolationScore);
    res.status(429).json({ message: 'Too many STT requests. Try again later' });
  };
};
@@ -6,6 +6,8 @@ const logViolation = require('~/cache/logViolation');
const { isEnabled } = require('~/server/utils');
const { logger } = require('~/config');

const { TOOL_CALL_VIOLATION_SCORE: score } = process.env;

const handler = async (req, res) => {
  const type = ViolationTypes.TOOL_CALL_LIMIT;
  const errorMessage = {
@@ -15,7 +17,7 @@ const handler = async (req, res) => {
    windowInMinutes: 1,
  };

  await logViolation(req, res, type, errorMessage, 0);
  await logViolation(req, res, type, errorMessage, score);
  res.status(429).json({ message: 'Too many tool call requests. Try again later' });
};
@@ -11,6 +11,7 @@ const getEnvironmentVariables = () => {
  const TTS_IP_WINDOW = parseInt(process.env.TTS_IP_WINDOW) || 1;
  const TTS_USER_MAX = parseInt(process.env.TTS_USER_MAX) || 50;
  const TTS_USER_WINDOW = parseInt(process.env.TTS_USER_WINDOW) || 1;
  const TTS_VIOLATION_SCORE = process.env.TTS_VIOLATION_SCORE;

  const ttsIpWindowMs = TTS_IP_WINDOW * 60 * 1000;
  const ttsIpMax = TTS_IP_MAX;
@@ -27,11 +28,12 @@ const getEnvironmentVariables = () => {
    ttsUserWindowMs,
    ttsUserMax,
    ttsUserWindowInMinutes,
    ttsViolationScore: TTS_VIOLATION_SCORE,
  };
};

const createTTSHandler = (ip = true) => {
  const { ttsIpMax, ttsIpWindowInMinutes, ttsUserMax, ttsUserWindowInMinutes } =
  const { ttsIpMax, ttsIpWindowInMinutes, ttsUserMax, ttsUserWindowInMinutes, ttsViolationScore } =
    getEnvironmentVariables();

  return async (req, res) => {
@@ -43,7 +45,7 @@ const createTTSHandler = (ip = true) => {
      windowInMinutes: ip ? ttsIpWindowInMinutes : ttsUserWindowInMinutes,
    };

    await logViolation(req, res, type, errorMessage);
    await logViolation(req, res, type, errorMessage, ttsViolationScore);
    res.status(429).json({ message: 'Too many TTS requests. Try again later' });
  };
};
@@ -11,6 +11,7 @@ const getEnvironmentVariables = () => {
  const FILE_UPLOAD_IP_WINDOW = parseInt(process.env.FILE_UPLOAD_IP_WINDOW) || 15;
  const FILE_UPLOAD_USER_MAX = parseInt(process.env.FILE_UPLOAD_USER_MAX) || 50;
  const FILE_UPLOAD_USER_WINDOW = parseInt(process.env.FILE_UPLOAD_USER_WINDOW) || 15;
  const FILE_UPLOAD_VIOLATION_SCORE = process.env.FILE_UPLOAD_VIOLATION_SCORE;

  const fileUploadIpWindowMs = FILE_UPLOAD_IP_WINDOW * 60 * 1000;
  const fileUploadIpMax = FILE_UPLOAD_IP_MAX;
@@ -27,6 +28,7 @@ const getEnvironmentVariables = () => {
    fileUploadUserWindowMs,
    fileUploadUserMax,
    fileUploadUserWindowInMinutes,
    fileUploadViolationScore: FILE_UPLOAD_VIOLATION_SCORE,
  };
};

@@ -36,6 +38,7 @@ const createFileUploadHandler = (ip = true) => {
    fileUploadIpWindowInMinutes,
    fileUploadUserMax,
    fileUploadUserWindowInMinutes,
    fileUploadViolationScore,
  } = getEnvironmentVariables();

  return async (req, res) => {
@@ -47,7 +50,7 @@ const createFileUploadHandler = (ip = true) => {
      windowInMinutes: ip ? fileUploadIpWindowInMinutes : fileUploadUserWindowInMinutes,
    };

    await logViolation(req, res, type, errorMessage);
    await logViolation(req, res, type, errorMessage, fileUploadViolationScore);
    res.status(429).json({ message: 'Too many file upload requests. Try again later' });
  };
};
@@ -1,78 +0,0 @@
const { getRoleByName } = require('~/models/Role');
const { logger } = require('~/config');

/**
 * Core function to check if a user has one or more required permissions
 *
 * @param {object} user - The user object
 * @param {PermissionTypes} permissionType - The type of permission to check
 * @param {Permissions[]} permissions - The list of specific permissions to check
 * @param {Record<Permissions, string[]>} [bodyProps] - An optional object where keys are permissions and values are arrays of properties to check
 * @param {object} [checkObject] - The object to check properties against
 * @returns {Promise<boolean>} Whether the user has the required permissions
 */
const checkAccess = async (user, permissionType, permissions, bodyProps = {}, checkObject = {}) => {
  if (!user) {
    return false;
  }

  const role = await getRoleByName(user.role);
  if (role && role.permissions && role.permissions[permissionType]) {
    const hasAnyPermission = permissions.some((permission) => {
      if (role.permissions[permissionType][permission]) {
        return true;
      }

      if (bodyProps[permission] && checkObject) {
        return bodyProps[permission].some((prop) =>
          Object.prototype.hasOwnProperty.call(checkObject, prop),
        );
      }

      return false;
    });

    return hasAnyPermission;
  }

  return false;
};

/**
 * Middleware to check if a user has one or more required permissions, optionally based on `req.body` properties.
 *
 * @param {PermissionTypes} permissionType - The type of permission to check.
 * @param {Permissions[]} permissions - The list of specific permissions to check.
 * @param {Record<Permissions, string[]>} [bodyProps] - An optional object where keys are permissions and values are arrays of `req.body` properties to check.
 * @returns {(req: ServerRequest, res: ServerResponse, next: NextFunction) => Promise<void>} Express middleware function.
 */
const generateCheckAccess = (permissionType, permissions, bodyProps = {}) => {
  return async (req, res, next) => {
    try {
      const hasAccess = await checkAccess(
        req.user,
        permissionType,
        permissions,
        bodyProps,
        req.body,
      );

      if (hasAccess) {
        return next();
      }

      logger.warn(
        `[${permissionType}] Forbidden: Insufficient permissions for User ${req.user.id}: ${permissions.join(', ')}`,
      );
      return res.status(403).json({ message: 'Forbidden: Insufficient permissions' });
    } catch (error) {
      logger.error(error);
      return res.status(500).json({ message: `Server error: ${error.message}` });
    }
  };
};

module.exports = {
  checkAccess,
  generateCheckAccess,
};
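The removal above goes hand in hand with the route diffs below, which switch to the `generateCheckAccess` exported by `@librechat/api`. Condensed from those diffs, the call-site migration looks like this (the notable change is that callers now inject `getRoleByName` rather than the middleware resolving roles internally):

// Before: positional arguments, role lookup baked into ~/server/middleware
const checkAgentCreate = generateCheckAccess(PermissionTypes.AGENTS, [
  Permissions.USE,
  Permissions.CREATE,
]);

// After: options object from @librechat/api; the caller supplies the role lookup
const checkAgentCreate = generateCheckAccess({
  permissionType: PermissionTypes.AGENTS,
  permissions: [Permissions.USE, Permissions.CREATE],
  getRoleByName,
});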
@@ -1,8 +1,5 @@
const checkAdmin = require('./admin');
const { checkAccess, generateCheckAccess } = require('./access');

module.exports = {
  checkAdmin,
  checkAccess,
  generateCheckAccess,
};
@@ -1,5 +1,5 @@
const uap = require('ua-parser-js');
const { handleError } = require('../utils');
const { handleError } = require('@librechat/api');
const { logViolation } = require('../../cache');

/**
@@ -1,4 +1,4 @@
const { handleError } = require('../utils');
const { handleError } = require('@librechat/api');

function validateEndpoint(req, res, next) {
  const { endpoint: _endpoint, endpointType } = req.body;
@@ -1,6 +1,6 @@
const { handleError } = require('@librechat/api');
const { ViolationTypes } = require('librechat-data-provider');
const { getModelsConfig } = require('~/server/controllers/ModelController');
const { handleError } = require('~/server/utils');
const { logViolation } = require('~/cache');
/**
 * Validates the model of the request.
162
api/server/routes/__tests__/static.spec.js
Normal file
@@ -0,0 +1,162 @@
const fs = require('fs');
const path = require('path');
const express = require('express');
const request = require('supertest');
const zlib = require('zlib');

// Create test setup
const mockTestDir = path.join(__dirname, 'test-static-route');

// Mock the paths module to point to our test directory
jest.mock('~/config/paths', () => ({
  imageOutput: mockTestDir,
}));

describe('Static Route Integration', () => {
  let app;
  let staticRoute;
  let testDir;
  let testImagePath;

  beforeAll(() => {
    // Create a test directory and files
    testDir = mockTestDir;
    testImagePath = path.join(testDir, 'test-image.jpg');

    if (!fs.existsSync(testDir)) {
      fs.mkdirSync(testDir, { recursive: true });
    }

    // Create a test image file
    fs.writeFileSync(testImagePath, 'fake-image-data');

    // Create a gzipped version of the test image (for gzip scanning tests)
    fs.writeFileSync(testImagePath + '.gz', zlib.gzipSync('fake-image-data'));
  });

  afterAll(() => {
    // Clean up test files
    if (fs.existsSync(testDir)) {
      fs.rmSync(testDir, { recursive: true, force: true });
    }
  });

  // Helper function to set up static route with specific config
  const setupStaticRoute = (skipGzipScan = false) => {
    if (skipGzipScan) {
      delete process.env.ENABLE_IMAGE_OUTPUT_GZIP_SCAN;
    } else {
      process.env.ENABLE_IMAGE_OUTPUT_GZIP_SCAN = 'true';
    }

    staticRoute = require('../static');
    app.use('/images', staticRoute);
  };

  beforeEach(() => {
    // Clear the module cache to get fresh imports
    jest.resetModules();

    app = express();

    // Clear environment variables
    delete process.env.ENABLE_IMAGE_OUTPUT_GZIP_SCAN;
    delete process.env.NODE_ENV;
  });

  describe('route functionality', () => {
    it('should serve static image files', async () => {
      process.env.NODE_ENV = 'production';
      setupStaticRoute();

      const response = await request(app).get('/images/test-image.jpg').expect(200);

      expect(response.body.toString()).toBe('fake-image-data');
    });

    it('should return 404 for non-existent files', async () => {
      setupStaticRoute();

      const response = await request(app).get('/images/nonexistent.jpg');
      expect(response.status).toBe(404);
    });
  });

  describe('cache behavior', () => {
    it('should set cache headers for images in production', async () => {
      process.env.NODE_ENV = 'production';
      setupStaticRoute();

      const response = await request(app).get('/images/test-image.jpg').expect(200);

      expect(response.headers['cache-control']).toBe('public, max-age=172800, s-maxage=86400');
    });

    it('should not set cache headers in development', async () => {
      process.env.NODE_ENV = 'development';
      setupStaticRoute();

      const response = await request(app).get('/images/test-image.jpg').expect(200);

      // Our middleware should not set the production cache-control header in development
      expect(response.headers['cache-control']).not.toBe('public, max-age=172800, s-maxage=86400');
    });
  });

  describe('gzip compression behavior', () => {
    beforeEach(() => {
      process.env.NODE_ENV = 'production';
    });

    it('should serve gzipped files when gzip scanning is enabled', async () => {
      setupStaticRoute(false); // Enable gzip scanning

      const response = await request(app)
        .get('/images/test-image.jpg')
        .set('Accept-Encoding', 'gzip')
        .expect(200);

      expect(response.headers['content-encoding']).toBe('gzip');
      expect(response.body.toString()).toBe('fake-image-data');
    });

    it('should not serve gzipped files when gzip scanning is disabled', async () => {
      setupStaticRoute(true); // Disable gzip scanning

      const response = await request(app)
        .get('/images/test-image.jpg')
        .set('Accept-Encoding', 'gzip')
        .expect(200);

      expect(response.headers['content-encoding']).toBeUndefined();
      expect(response.body.toString()).toBe('fake-image-data');
    });
  });

  describe('path configuration', () => {
    it('should use the configured imageOutput path', async () => {
      setupStaticRoute();

      const response = await request(app).get('/images/test-image.jpg').expect(200);

      expect(response.body.toString()).toBe('fake-image-data');
    });

    it('should serve from subdirectories', async () => {
      // Create a subdirectory with a file
      const subDir = path.join(testDir, 'thumbs');
      fs.mkdirSync(subDir, { recursive: true });
      const thumbPath = path.join(subDir, 'thumb.jpg');
      fs.writeFileSync(thumbPath, 'thumbnail-data');

      setupStaticRoute();

      const response = await request(app).get('/images/thumbs/thumb.jpg').expect(200);

      expect(response.body.toString()).toBe('thumbnail-data');

      // Clean up
      fs.rmSync(subDir, { recursive: true, force: true });
    });
  });
});
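The module under test, `../static`, is not included in this diff. A rough sketch of the behavior the assertions imply — production-only cache headers and opt-in serving of precompressed `.gz` siblings — assuming plain `express.static`; the real implementation may differ:

const fs = require('fs');
const path = require('path');
const express = require('express');
const paths = require('~/config/paths');

const router = express.Router();

if (process.env.ENABLE_IMAGE_OUTPUT_GZIP_SCAN === 'true') {
  // Rewrite to a precompressed sibling (photo.jpg -> photo.jpg.gz) when one exists
  router.use((req, res, next) => {
    const gzPath = path.join(paths.imageOutput, `${req.path}.gz`);
    if (req.acceptsEncodings('gzip') && fs.existsSync(gzPath)) {
      res.type(path.extname(req.path)); // preserve the original MIME type
      res.set('Content-Encoding', 'gzip');
      req.url = `${req.url}.gz`;
    }
    next();
  });
}

router.use(
  express.static(paths.imageOutput, {
    setHeaders: (res) => {
      if (process.env.NODE_ENV === 'production') {
        res.set('Cache-Control', 'public, max-age=172800, s-maxage=86400');
      }
    },
  }),
);

module.exports = router;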
@@ -1,14 +1,28 @@
const express = require('express');
const { nanoid } = require('nanoid');
const { actionDelimiter, SystemRoles, removeNullishValues } = require('librechat-data-provider');
const { logger } = require('@librechat/data-schemas');
const { generateCheckAccess } = require('@librechat/api');
const {
  SystemRoles,
  Permissions,
  PermissionTypes,
  actionDelimiter,
  removeNullishValues,
} = require('librechat-data-provider');
const { encryptMetadata, domainParser } = require('~/server/services/ActionService');
const { updateAction, getActions, deleteAction } = require('~/models/Action');
const { isActionDomainAllowed } = require('~/server/services/domains');
const { getAgent, updateAgent } = require('~/models/Agent');
const { logger } = require('~/config');
const { getRoleByName } = require('~/models/Role');

const router = express.Router();

const checkAgentCreate = generateCheckAccess({
  permissionType: PermissionTypes.AGENTS,
  permissions: [Permissions.USE, Permissions.CREATE],
  getRoleByName,
});

// If the user has the ADMIN role,
// then action editing is possible even if they are not the owner of the assistant
const isAdmin = (req) => {
@@ -41,7 +55,7 @@ router.get('/', async (req, res) => {
 * @param {ActionMetadata} req.body.metadata - Metadata for the action.
 * @returns {Object} 200 - success response - application/json
 */
router.post('/:agent_id', async (req, res) => {
router.post('/:agent_id', checkAgentCreate, async (req, res) => {
  try {
    const { agent_id } = req.params;

@@ -149,7 +163,7 @@ router.post('/:agent_id', async (req, res) => {
 * @param {string} req.params.action_id - The ID of the action to delete.
 * @returns {Object} 200 - success response - application/json
 */
router.delete('/:agent_id/:action_id', async (req, res) => {
router.delete('/:agent_id/:action_id', checkAgentCreate, async (req, res) => {
  try {
    const { agent_id, action_id } = req.params;
    const admin = isAdmin(req);
|
||||
const express = require('express');
|
||||
const { generateCheckAccess, skipAgentCheck } = require('@librechat/api');
|
||||
const { PermissionTypes, Permissions } = require('librechat-data-provider');
|
||||
const {
|
||||
setHeaders,
|
||||
moderateText,
|
||||
// validateModel,
|
||||
generateCheckAccess,
|
||||
validateConvoAccess,
|
||||
buildEndpointOption,
|
||||
} = require('~/server/middleware');
|
||||
const { initializeClient } = require('~/server/services/Endpoints/agents');
|
||||
const AgentController = require('~/server/controllers/agents/request');
|
||||
const addTitle = require('~/server/services/Endpoints/agents/title');
|
||||
const { getRoleByName } = require('~/models/Role');
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
router.use(moderateText);
|
||||
|
||||
const checkAgentAccess = generateCheckAccess(PermissionTypes.AGENTS, [Permissions.USE]);
|
||||
const checkAgentAccess = generateCheckAccess({
|
||||
permissionType: PermissionTypes.AGENTS,
|
||||
permissions: [Permissions.USE],
|
||||
skipCheck: skipAgentCheck,
|
||||
getRoleByName,
|
||||
});
|
||||
|
||||
router.use(checkAgentAccess);
|
||||
router.use(validateConvoAccess);
|
||||
|
||||
@@ -1,29 +1,36 @@
const express = require('express');
const { generateCheckAccess } = require('@librechat/api');
const { PermissionTypes, Permissions } = require('librechat-data-provider');
const { requireJwtAuth, generateCheckAccess } = require('~/server/middleware');
const { requireJwtAuth } = require('~/server/middleware');
const v1 = require('~/server/controllers/agents/v1');
const { getRoleByName } = require('~/models/Role');
const actions = require('./actions');
const tools = require('./tools');

const router = express.Router();
const avatar = express.Router();

const checkAgentAccess = generateCheckAccess(PermissionTypes.AGENTS, [Permissions.USE]);
const checkAgentCreate = generateCheckAccess(PermissionTypes.AGENTS, [
  Permissions.USE,
  Permissions.CREATE,
]);
const checkAgentAccess = generateCheckAccess({
  permissionType: PermissionTypes.AGENTS,
  permissions: [Permissions.USE],
  getRoleByName,
});
const checkAgentCreate = generateCheckAccess({
  permissionType: PermissionTypes.AGENTS,
  permissions: [Permissions.USE, Permissions.CREATE],
  getRoleByName,
});

const checkGlobalAgentShare = generateCheckAccess(
  PermissionTypes.AGENTS,
  [Permissions.USE, Permissions.CREATE],
  {
const checkGlobalAgentShare = generateCheckAccess({
  permissionType: PermissionTypes.AGENTS,
  permissions: [Permissions.USE, Permissions.CREATE],
  bodyProps: {
    [Permissions.SHARED_GLOBAL]: ['projectIds', 'removeProjectIds'],
  },
);
  getRoleByName,
});

router.use(requireJwtAuth);
router.use(checkAgentAccess);

/**
 * Agent actions route.
@@ -1,16 +1,17 @@
const multer = require('multer');
const express = require('express');
const { sleep } = require('@librechat/agents');
const { isEnabled } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { CacheKeys, EModelEndpoint } = require('librechat-data-provider');
const { getConvosByCursor, deleteConvos, getConvo, saveConvo } = require('~/models/Conversation');
const { forkConversation, duplicateConversation } = require('~/server/utils/import/fork');
const { createImportLimiters, createForkLimiters } = require('~/server/middleware');
const { storage, importFileFilter } = require('~/server/routes/files/multer');
const requireJwtAuth = require('~/server/middleware/requireJwtAuth');
const { importConversations } = require('~/server/utils/import');
const { createImportLimiters } = require('~/server/middleware');
const { deleteToolCalls } = require('~/models/ToolCall');
const { isEnabled, sleep } = require('~/server/utils');
const getLogStores = require('~/cache/getLogStores');
const { logger } = require('~/config');

const assistantClients = {
  [EModelEndpoint.azureAssistants]: require('~/server/services/Endpoints/azureAssistants'),
@@ -43,6 +44,7 @@ router.get('/', async (req, res) => {
    });
    res.status(200).json(result);
  } catch (error) {
    logger.error('Error fetching conversations', error);
    res.status(500).json({ error: 'Error fetching conversations' });
  }
});
@@ -156,6 +158,7 @@ router.post('/update', async (req, res) => {
});

const { importIpLimiter, importUserLimiter } = createImportLimiters();
const { forkIpLimiter, forkUserLimiter } = createForkLimiters();
const upload = multer({ storage: storage, fileFilter: importFileFilter });

/**
@@ -189,7 +192,7 @@ router.post(
 * @param {express.Response<TForkConvoResponse>} res - Express response object.
 * @returns {Promise<void>} - The response after forking the conversation.
 */
router.post('/fork', async (req, res) => {
router.post('/fork', forkIpLimiter, forkUserLimiter, async (req, res) => {
  try {
    /** @type {TForkConvoRequest} */
    const { conversationId, messageId, option, splitAtTarget, latestMessageId } = req.body;
282
api/server/routes/files/files.agents.test.js
Normal file
@@ -0,0 +1,282 @@
const express = require('express');
const request = require('supertest');
const mongoose = require('mongoose');
const { v4: uuidv4 } = require('uuid');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { GLOBAL_PROJECT_NAME } = require('librechat-data-provider').Constants;

// Mock dependencies
jest.mock('~/server/services/Files/process', () => ({
  processDeleteRequest: jest.fn().mockResolvedValue({}),
  filterFile: jest.fn(),
  processFileUpload: jest.fn(),
  processAgentFileUpload: jest.fn(),
}));

jest.mock('~/server/services/Files/strategies', () => ({
  getStrategyFunctions: jest.fn(() => ({})),
}));

jest.mock('~/server/controllers/assistants/helpers', () => ({
  getOpenAIClient: jest.fn(),
}));

jest.mock('~/server/services/Tools/credentials', () => ({
  loadAuthValues: jest.fn(),
}));

jest.mock('~/server/services/Files/S3/crud', () => ({
  refreshS3FileUrls: jest.fn(),
}));

jest.mock('~/cache', () => ({
  getLogStores: jest.fn(() => ({
    get: jest.fn(),
    set: jest.fn(),
  })),
}));

jest.mock('~/config', () => ({
  logger: {
    error: jest.fn(),
    warn: jest.fn(),
    debug: jest.fn(),
  },
}));

const { createFile } = require('~/models/File');
const { createAgent } = require('~/models/Agent');
const { getProjectByName } = require('~/models/Project');

// Import the router after mocks
const router = require('./files');

describe('File Routes - Agent Files Endpoint', () => {
  let app;
  let mongoServer;
  let authorId;
  let otherUserId;
  let agentId;
  let fileId1;
  let fileId2;
  let fileId3;

  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
    await mongoose.connect(mongoServer.getUri());

    // Initialize models
    require('~/db/models');

    app = express();
    app.use(express.json());

    // Mock authentication middleware
    app.use((req, res, next) => {
      req.user = { id: otherUserId || 'default-user' };
      req.app = { locals: {} };
      next();
    });

    app.use('/files', router);
  });

  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
  });

  beforeEach(async () => {
    jest.clearAllMocks();

    // Clear database
    const collections = mongoose.connection.collections;
    for (const key in collections) {
      await collections[key].deleteMany({});
    }

    authorId = new mongoose.Types.ObjectId().toString();
    otherUserId = new mongoose.Types.ObjectId().toString();
    agentId = uuidv4();
    fileId1 = uuidv4();
    fileId2 = uuidv4();
    fileId3 = uuidv4();

    // Create files
    await createFile({
      user: authorId,
      file_id: fileId1,
      filename: 'agent-file1.txt',
      filepath: `/uploads/${authorId}/${fileId1}`,
      bytes: 1024,
      type: 'text/plain',
    });

    await createFile({
      user: authorId,
      file_id: fileId2,
      filename: 'agent-file2.txt',
      filepath: `/uploads/${authorId}/${fileId2}`,
      bytes: 2048,
      type: 'text/plain',
    });

    await createFile({
      user: otherUserId,
      file_id: fileId3,
      filename: 'user-file.txt',
      filepath: `/uploads/${otherUserId}/${fileId3}`,
      bytes: 512,
      type: 'text/plain',
    });

    // Create an agent with files attached
    await createAgent({
      id: agentId,
      name: 'Test Agent',
      author: authorId,
      model: 'gpt-4',
      provider: 'openai',
      isCollaborative: true,
      tool_resources: {
        file_search: {
          file_ids: [fileId1, fileId2],
        },
      },
    });

    // Share the agent globally
    const globalProject = await getProjectByName(GLOBAL_PROJECT_NAME, '_id');
    if (globalProject) {
      const { updateAgent } = require('~/models/Agent');
      await updateAgent({ id: agentId }, { projectIds: [globalProject._id] });
    }
  });

  describe('GET /files/agent/:agent_id', () => {
    it('should return files accessible through the agent for non-author', async () => {
      const response = await request(app).get(`/files/agent/${agentId}`);

      expect(response.status).toBe(200);
      expect(response.body).toHaveLength(2); // Only agent files, not user-owned files

      const fileIds = response.body.map((f) => f.file_id);
      expect(fileIds).toContain(fileId1);
      expect(fileIds).toContain(fileId2);
      expect(fileIds).not.toContain(fileId3); // User's own file not included
    });

    it('should return 400 when agent_id is not provided', async () => {
      const response = await request(app).get('/files/agent/');

      expect(response.status).toBe(404); // Express returns 404 for missing route parameter
    });

    it('should return empty array for non-existent agent', async () => {
      const response = await request(app).get('/files/agent/non-existent-agent');

      expect(response.status).toBe(200);
      expect(response.body).toEqual([]); // Empty array for non-existent agent
    });

    it('should return empty array when agent is not collaborative', async () => {
      // Create a non-collaborative agent
      const nonCollabAgentId = uuidv4();
      await createAgent({
        id: nonCollabAgentId,
        name: 'Non-Collaborative Agent',
        author: authorId,
        model: 'gpt-4',
        provider: 'openai',
        isCollaborative: false,
        tool_resources: {
          file_search: {
            file_ids: [fileId1],
          },
        },
      });

      // Share it globally
      const globalProject = await getProjectByName(GLOBAL_PROJECT_NAME, '_id');
      if (globalProject) {
        const { updateAgent } = require('~/models/Agent');
        await updateAgent({ id: nonCollabAgentId }, { projectIds: [globalProject._id] });
      }

      const response = await request(app).get(`/files/agent/${nonCollabAgentId}`);

      expect(response.status).toBe(200);
      expect(response.body).toEqual([]); // Empty array when not collaborative
    });

    it('should return agent files for agent author', async () => {
      // Create a new app instance with author authentication
      const authorApp = express();
      authorApp.use(express.json());
      authorApp.use((req, res, next) => {
        req.user = { id: authorId };
        req.app = { locals: {} };
        next();
      });
      authorApp.use('/files', router);

      const response = await request(authorApp).get(`/files/agent/${agentId}`);

      expect(response.status).toBe(200);
      expect(response.body).toHaveLength(2); // Agent files for author

      const fileIds = response.body.map((f) => f.file_id);
      expect(fileIds).toContain(fileId1);
      expect(fileIds).toContain(fileId2);
      expect(fileIds).not.toContain(fileId3); // User's own file not included
    });

    it('should return files uploaded by other users to shared agent for author', async () => {
      // Create a file uploaded by another user
      const otherUserFileId = uuidv4();
      const anotherUserId = new mongoose.Types.ObjectId().toString();

      await createFile({
        user: anotherUserId,
        file_id: otherUserFileId,
        filename: 'other-user-file.txt',
        filepath: `/uploads/${anotherUserId}/${otherUserFileId}`,
        bytes: 4096,
        type: 'text/plain',
      });

      // Update agent to include the file uploaded by another user
      const { updateAgent } = require('~/models/Agent');
      await updateAgent(
        { id: agentId },
        {
          tool_resources: {
            file_search: {
              file_ids: [fileId1, fileId2, otherUserFileId],
            },
          },
        },
      );

      // Create app instance with author authentication
      const authorApp = express();
      authorApp.use(express.json());
      authorApp.use((req, res, next) => {
        req.user = { id: authorId };
        req.app = { locals: {} };
        next();
      });
      authorApp.use('/files', router);

      const response = await request(authorApp).get(`/files/agent/${agentId}`);

      expect(response.status).toBe(200);
      expect(response.body).toHaveLength(3); // Including file from another user

      const fileIds = response.body.map((f) => f.file_id);
      expect(fileIds).toContain(fileId1);
      expect(fileIds).toContain(fileId2);
      expect(fileIds).toContain(otherUserFileId); // File uploaded by another user
    });
  });
});
@@ -5,6 +5,7 @@ const {
  Time,
  isUUID,
  CacheKeys,
  Constants,
  FileSources,
  EModelEndpoint,
  isAgentsEndpoint,
@@ -16,11 +17,12 @@ const {
  processDeleteRequest,
  processAgentFileUpload,
} = require('~/server/services/Files/process');
const { getFiles, batchUpdateFiles, hasAccessToFilesViaAgent } = require('~/models/File');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { getOpenAIClient } = require('~/server/controllers/assistants/helpers');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { refreshS3FileUrls } = require('~/server/services/Files/S3/crud');
const { getFiles, batchUpdateFiles } = require('~/models/File');
const { getProjectByName } = require('~/models/Project');
const { getAssistant } = require('~/models/Assistant');
const { getAgent } = require('~/models/Agent');
const { getLogStores } = require('~/cache');
@@ -50,6 +52,68 @@ router.get('/', async (req, res) => {
  }
});

/**
 * Get files specific to an agent
 * @route GET /files/agent/:agent_id
 * @param {string} agent_id - The agent ID to get files for
 * @returns {Promise<TFile[]>} Array of files attached to the agent
 */
router.get('/agent/:agent_id', async (req, res) => {
  try {
    const { agent_id } = req.params;
    const userId = req.user.id;

    if (!agent_id) {
      return res.status(400).json({ error: 'Agent ID is required' });
    }

    // Get the agent to check ownership and attached files
    const agent = await getAgent({ id: agent_id });

    if (!agent) {
      // No agent found, return empty array
      return res.status(200).json([]);
    }

    // Check if user has access to the agent
    if (agent.author.toString() !== userId) {
      // Non-authors need the agent to be globally shared and collaborative
      const globalProject = await getProjectByName(Constants.GLOBAL_PROJECT_NAME, '_id');

      if (
        !globalProject ||
        !agent.projectIds.some((pid) => pid.toString() === globalProject._id.toString()) ||
        !agent.isCollaborative
      ) {
        return res.status(200).json([]);
      }
    }

    // Collect all file IDs from agent's tool resources
    const agentFileIds = [];
    if (agent.tool_resources) {
      for (const [, resource] of Object.entries(agent.tool_resources)) {
        if (resource?.file_ids && Array.isArray(resource.file_ids)) {
          agentFileIds.push(...resource.file_ids);
        }
      }
    }

    // If no files attached to agent, return empty array
    if (agentFileIds.length === 0) {
      return res.status(200).json([]);
    }

    // Get only the files attached to this agent
    const files = await getFiles({ file_id: { $in: agentFileIds } }, null, { text: 0 });

    res.status(200).json(files);
  } catch (error) {
    logger.error('[/files/agent/:agent_id] Error fetching agent files:', error);
    res.status(500).json({ error: 'Failed to fetch agent files' });
  }
});

router.get('/config', async (req, res) => {
  try {
    res.status(200).json(req.app.locals.fileConfig);
@@ -86,11 +150,62 @@ router.delete('/', async (req, res) => {

  const fileIds = files.map((file) => file.file_id);
  const dbFiles = await getFiles({ file_id: { $in: fileIds } });
  const unauthorizedFiles = dbFiles.filter((file) => file.user.toString() !== req.user.id);

  const ownedFiles = [];
  const nonOwnedFiles = [];
  const fileMap = new Map();

  for (const file of dbFiles) {
    fileMap.set(file.file_id, file);
    if (file.user.toString() === req.user.id) {
      ownedFiles.push(file);
    } else {
      nonOwnedFiles.push(file);
    }
  }

  // If all files are owned by the user, no need for further checks
  if (nonOwnedFiles.length === 0) {
    await processDeleteRequest({ req, files: ownedFiles });
    logger.debug(
      `[/files] Files deleted successfully: ${ownedFiles
        .filter((f) => f.file_id)
        .map((f) => f.file_id)
        .join(', ')}`,
    );
    res.status(200).json({ message: 'Files deleted successfully' });
    return;
  }

  // Check access for non-owned files
  let authorizedFiles = [...ownedFiles];
  let unauthorizedFiles = [];

  if (req.body.agent_id && nonOwnedFiles.length > 0) {
    // Batch check access for all non-owned files
    const nonOwnedFileIds = nonOwnedFiles.map((f) => f.file_id);
    const accessMap = await hasAccessToFilesViaAgent(
      req.user.id,
      nonOwnedFileIds,
      req.body.agent_id,
    );

    // Separate authorized and unauthorized files
    for (const file of nonOwnedFiles) {
      if (accessMap.get(file.file_id)) {
        authorizedFiles.push(file);
      } else {
        unauthorizedFiles.push(file);
      }
    }
  } else {
    // No agent context, all non-owned files are unauthorized
    unauthorizedFiles = nonOwnedFiles;
  }

  if (unauthorizedFiles.length > 0) {
    return res.status(403).json({
      message: 'You can only delete your own files',
      message: 'You can only delete files you have access to',
      unauthorizedFiles: unauthorizedFiles.map((f) => f.file_id),
    });
  }
@@ -131,10 +246,10 @@ router.delete('/', async (req, res) => {
      .json({ message: 'File associations removed successfully from Azure Assistant' });
  }

  await processDeleteRequest({ req, files: dbFiles });
  await processDeleteRequest({ req, files: authorizedFiles });

  logger.debug(
    `[/files] Files deleted successfully: ${files
    `[/files] Files deleted successfully: ${authorizedFiles
      .filter((f) => f.file_id)
      .map((f) => f.file_id)
      .join(', ')}`,
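The batch check above relies on `hasAccessToFilesViaAgent` from `~/models/File`, which this diff imports but does not show. Inferring from the call site and the tests below, it resolves to a Map keyed by file_id; a simplified sketch of the assumed contract (the global-share condition the tests exercise is omitted here for brevity):

// Sketch of the inferred contract, not the actual implementation
async function hasAccessToFilesViaAgent(userId, fileIds, agentId) {
  const accessMap = new Map();
  const agent = await getAgent({ id: agentId });
  // Non-authors only gain access when the agent is collaborative
  const canAccess = Boolean(
    agent && (agent.author.toString() === userId || agent.isCollaborative),
  );
  const attached = new Set(
    Object.values(agent?.tool_resources ?? {}).flatMap((r) => r?.file_ids ?? []),
  );
  for (const fileId of fileIds) {
    accessMap.set(fileId, canAccess && attached.has(fileId));
  }
  return accessMap;
}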
302
api/server/routes/files/files.test.js
Normal file
@@ -0,0 +1,302 @@
const express = require('express');
const request = require('supertest');
const mongoose = require('mongoose');
const { v4: uuidv4 } = require('uuid');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { GLOBAL_PROJECT_NAME } = require('librechat-data-provider').Constants;

// Mock dependencies
jest.mock('~/server/services/Files/process', () => ({
  processDeleteRequest: jest.fn().mockResolvedValue({}),
  filterFile: jest.fn(),
  processFileUpload: jest.fn(),
  processAgentFileUpload: jest.fn(),
}));

jest.mock('~/server/services/Files/strategies', () => ({
  getStrategyFunctions: jest.fn(() => ({})),
}));

jest.mock('~/server/controllers/assistants/helpers', () => ({
  getOpenAIClient: jest.fn(),
}));

jest.mock('~/server/services/Tools/credentials', () => ({
  loadAuthValues: jest.fn(),
}));

jest.mock('~/server/services/Files/S3/crud', () => ({
  refreshS3FileUrls: jest.fn(),
}));

jest.mock('~/cache', () => ({
  getLogStores: jest.fn(() => ({
    get: jest.fn(),
    set: jest.fn(),
  })),
}));

jest.mock('~/config', () => ({
  logger: {
    error: jest.fn(),
    warn: jest.fn(),
    debug: jest.fn(),
  },
}));

const { createFile } = require('~/models/File');
const { createAgent } = require('~/models/Agent');
const { getProjectByName } = require('~/models/Project');
const { processDeleteRequest } = require('~/server/services/Files/process');

// Import the router after mocks
const router = require('./files');

describe('File Routes - Delete with Agent Access', () => {
  let app;
  let mongoServer;
  let authorId;
  let otherUserId;
  let agentId;
  let fileId;

  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
    await mongoose.connect(mongoServer.getUri());

    // Initialize models
    require('~/db/models');

    app = express();
    app.use(express.json());

    // Mock authentication middleware
    app.use((req, res, next) => {
      req.user = { id: otherUserId || 'default-user' };
      req.app = { locals: {} };
      next();
    });

    app.use('/files', router);
  });

  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
  });

  beforeEach(async () => {
    jest.clearAllMocks();

    // Clear database
    const collections = mongoose.connection.collections;
    for (const key in collections) {
      await collections[key].deleteMany({});
    }

    authorId = new mongoose.Types.ObjectId().toString();
    otherUserId = new mongoose.Types.ObjectId().toString();
    fileId = uuidv4();

    // Create a file owned by the author
    await createFile({
      user: authorId,
      file_id: fileId,
      filename: 'test.txt',
      filepath: `/uploads/${authorId}/${fileId}`,
      bytes: 1024,
      type: 'text/plain',
    });

    // Create an agent with the file attached
    const agent = await createAgent({
      id: uuidv4(),
      name: 'Test Agent',
      author: authorId,
      model: 'gpt-4',
      provider: 'openai',
      isCollaborative: true,
      tool_resources: {
        file_search: {
          file_ids: [fileId],
        },
      },
    });
    agentId = agent.id;

    // Share the agent globally
    const globalProject = await getProjectByName(GLOBAL_PROJECT_NAME, '_id');
    if (globalProject) {
      const { updateAgent } = require('~/models/Agent');
      await updateAgent({ id: agentId }, { projectIds: [globalProject._id] });
    }
  });

  describe('DELETE /files', () => {
    it('should allow deleting files owned by the user', async () => {
      // Create a file owned by the current user
      const userFileId = uuidv4();
      await createFile({
        user: otherUserId,
        file_id: userFileId,
        filename: 'user-file.txt',
        filepath: `/uploads/${otherUserId}/${userFileId}`,
        bytes: 1024,
        type: 'text/plain',
      });

      const response = await request(app)
        .delete('/files')
        .send({
          files: [
            {
              file_id: userFileId,
              filepath: `/uploads/${otherUserId}/${userFileId}`,
            },
          ],
        });

      expect(response.status).toBe(200);
      expect(response.body.message).toBe('Files deleted successfully');
      expect(processDeleteRequest).toHaveBeenCalled();
    });

    it('should prevent deleting files not owned by user without agent context', async () => {
      const response = await request(app)
        .delete('/files')
        .send({
          files: [
            {
              file_id: fileId,
              filepath: `/uploads/${authorId}/${fileId}`,
            },
          ],
        });

      expect(response.status).toBe(403);
      expect(response.body.message).toBe('You can only delete files you have access to');
      expect(response.body.unauthorizedFiles).toContain(fileId);
      expect(processDeleteRequest).not.toHaveBeenCalled();
    });

    it('should allow deleting files accessible through shared agent', async () => {
      const response = await request(app)
        .delete('/files')
        .send({
          agent_id: agentId,
          files: [
            {
              file_id: fileId,
              filepath: `/uploads/${authorId}/${fileId}`,
            },
          ],
        });

      expect(response.status).toBe(200);
      expect(response.body.message).toBe('Files deleted successfully');
      expect(processDeleteRequest).toHaveBeenCalled();
    });

    it('should prevent deleting files not attached to the specified agent', async () => {
      // Create another file not attached to the agent
      const unattachedFileId = uuidv4();
      await createFile({
        user: authorId,
        file_id: unattachedFileId,
        filename: 'unattached.txt',
        filepath: `/uploads/${authorId}/${unattachedFileId}`,
        bytes: 1024,
        type: 'text/plain',
      });

      const response = await request(app)
        .delete('/files')
        .send({
          agent_id: agentId,
          files: [
            {
              file_id: unattachedFileId,
              filepath: `/uploads/${authorId}/${unattachedFileId}`,
            },
          ],
        });

      expect(response.status).toBe(403);
      expect(response.body.message).toBe('You can only delete files you have access to');
      expect(response.body.unauthorizedFiles).toContain(unattachedFileId);
    });

    it('should handle mixed authorized and unauthorized files', async () => {
      // Create a file owned by the current user
      const userFileId = uuidv4();
      await createFile({
        user: otherUserId,
        file_id: userFileId,
        filename: 'user-file.txt',
        filepath: `/uploads/${otherUserId}/${userFileId}`,
        bytes: 1024,
        type: 'text/plain',
      });

      // Create an unauthorized file
      const unauthorizedFileId = uuidv4();
      await createFile({
        user: authorId,
        file_id: unauthorizedFileId,
        filename: 'unauthorized.txt',
        filepath: `/uploads/${authorId}/${unauthorizedFileId}`,
        bytes: 1024,
        type: 'text/plain',
      });

      const response = await request(app)
        .delete('/files')
        .send({
          agent_id: agentId,
          files: [
            {
              file_id: fileId, // Authorized through agent
              filepath: `/uploads/${authorId}/${fileId}`,
            },
            {
              file_id: userFileId, // Owned by user
              filepath: `/uploads/${otherUserId}/${userFileId}`,
            },
            {
              file_id: unauthorizedFileId, // Not authorized
              filepath: `/uploads/${authorId}/${unauthorizedFileId}`,
            },
          ],
        });

      expect(response.status).toBe(403);
      expect(response.body.message).toBe('You can only delete files you have access to');
      expect(response.body.unauthorizedFiles).toContain(unauthorizedFileId);
      expect(response.body.unauthorizedFiles).not.toContain(fileId);
      expect(response.body.unauthorizedFiles).not.toContain(userFileId);
    });

    it('should prevent deleting files when agent is not collaborative', async () => {
      // Update the agent to be non-collaborative
      const { updateAgent } = require('~/models/Agent');
      await updateAgent({ id: agentId }, { isCollaborative: false });

      const response = await request(app)
        .delete('/files')
        .send({
          agent_id: agentId,
          files: [
            {
              file_id: fileId,
              filepath: `/uploads/${authorId}/${fileId}`,
            },
          ],
        });

      expect(response.status).toBe(403);
      expect(response.body.message).toBe('You can only delete files you have access to');
      expect(response.body.unauthorizedFiles).toContain(fileId);
      expect(processDeleteRequest).not.toHaveBeenCalled();
    });
  });
});
@@ -477,7 +477,9 @@ describe('Multer Configuration', () => {
        done(new Error('Expected mkdirSync to throw an error but no error was thrown'));
      } catch (error) {
        // This is the expected behavior - mkdirSync throws synchronously for invalid paths
        expect(error.code).toBe('EACCES');
        // On Linux, this typically returns EACCES (permission denied)
        // On macOS/Darwin, this returns ENOENT (no such file or directory)
        expect(['EACCES', 'ENOENT']).toContain(error.code);
        done();
      }
    });
@@ -1,37 +1,43 @@
|
||||
const express = require('express');
|
||||
const { Tokenizer } = require('@librechat/api');
|
||||
const { Tokenizer, generateCheckAccess } = require('@librechat/api');
|
||||
const { PermissionTypes, Permissions } = require('librechat-data-provider');
|
||||
const {
|
||||
getAllUserMemories,
|
||||
toggleUserMemories,
|
||||
createMemory,
|
||||
setMemory,
|
||||
deleteMemory,
|
||||
setMemory,
|
||||
} = require('~/models');
|
||||
const { requireJwtAuth, generateCheckAccess } = require('~/server/middleware');
|
||||
const { requireJwtAuth } = require('~/server/middleware');
|
||||
const { getRoleByName } = require('~/models/Role');
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
const checkMemoryRead = generateCheckAccess(PermissionTypes.MEMORIES, [
|
||||
Permissions.USE,
|
||||
Permissions.READ,
|
||||
]);
|
||||
const checkMemoryCreate = generateCheckAccess(PermissionTypes.MEMORIES, [
|
||||
Permissions.USE,
|
||||
Permissions.CREATE,
|
||||
]);
|
||||
const checkMemoryUpdate = generateCheckAccess(PermissionTypes.MEMORIES, [
|
||||
Permissions.USE,
|
||||
Permissions.UPDATE,
|
||||
]);
|
||||
const checkMemoryDelete = generateCheckAccess(PermissionTypes.MEMORIES, [
|
||||
Permissions.USE,
|
||||
Permissions.UPDATE,
|
||||
]);
|
||||
const checkMemoryOptOut = generateCheckAccess(PermissionTypes.MEMORIES, [
|
||||
Permissions.USE,
|
||||
Permissions.OPT_OUT,
|
||||
]);
|
||||
const checkMemoryRead = generateCheckAccess({
|
||||
permissionType: PermissionTypes.MEMORIES,
|
||||
permissions: [Permissions.USE, Permissions.READ],
|
||||
getRoleByName,
|
||||
});
|
||||
const checkMemoryCreate = generateCheckAccess({
|
||||
permissionType: PermissionTypes.MEMORIES,
|
||||
permissions: [Permissions.USE, Permissions.CREATE],
|
||||
getRoleByName,
|
||||
});
|
||||
const checkMemoryUpdate = generateCheckAccess({
|
||||
permissionType: PermissionTypes.MEMORIES,
|
||||
permissions: [Permissions.USE, Permissions.UPDATE],
|
||||
getRoleByName,
|
||||
});
|
||||
const checkMemoryDelete = generateCheckAccess({
|
||||
permissionType: PermissionTypes.MEMORIES,
|
||||
permissions: [Permissions.USE, Permissions.UPDATE],
|
||||
getRoleByName,
|
||||
});
|
||||
const checkMemoryOptOut = generateCheckAccess({
|
||||
permissionType: PermissionTypes.MEMORIES,
|
||||
permissions: [Permissions.USE, Permissions.OPT_OUT],
|
||||
getRoleByName,
|
||||
});
|
||||
|
||||
router.use(requireJwtAuth);
|
||||
|
||||
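The hunk above migrates every permission middleware from positional arguments to a single options object that injects `getRoleByName`. A minimal sketch of what such a factory might look like, assuming it resolves the user's role and requires every listed permission (illustrative only, not the actual @librechat/api implementation):

const generateCheckAccessSketch = ({ permissionType, permissions, getRoleByName }) => {
  // Returns Express middleware; the names mirror the call sites above.
  return async function checkAccess(req, res, next) {
    try {
      const role = await getRoleByName(req.user.role);
      const granted = permissions.every(
        (permission) => role?.permissions?.[permissionType]?.[permission] === true,
      );
      if (granted) {
        return next();
      }
      return res.status(403).json({ message: 'Forbidden: Insufficient permissions' });
    } catch (error) {
      return res.status(500).json({ message: `Server error: ${error.message}` });
    }
  };
};

Injecting `getRoleByName` keeps the shared package free of a direct dependency on the API's Role model, which is presumably why the signature changed.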
@@ -166,40 +172,68 @@ router.patch('/preferences', checkMemoryOptOut, async (req, res) => {
/**
 * PATCH /memories/:key
 * Updates the value of an existing memory entry for the authenticated user.
 * Body: { value: string }
 * Body: { key?: string, value: string }
 * Returns 200 and { updated: true, memory: <updatedDoc> } when successful.
 */
router.patch('/:key', checkMemoryUpdate, async (req, res) => {
  const { key } = req.params;
  const { value } = req.body || {};
  const { key: urlKey } = req.params;
  const { key: bodyKey, value } = req.body || {};

  if (typeof value !== 'string' || value.trim() === '') {
    return res.status(400).json({ error: 'Value is required and must be a non-empty string.' });
  }

  // Use the key from the body if provided, otherwise use the key from the URL
  const newKey = bodyKey || urlKey;

  try {
    const tokenCount = Tokenizer.getTokenCount(value, 'o200k_base');

    const memories = await getAllUserMemories(req.user.id);
    const existingMemory = memories.find((m) => m.key === key);
    const existingMemory = memories.find((m) => m.key === urlKey);

    if (!existingMemory) {
      return res.status(404).json({ error: 'Memory not found.' });
    }

    const result = await setMemory({
      userId: req.user.id,
      key,
      value,
      tokenCount,
    });
    // If the key is changing, we need to handle it specially
    if (newKey !== urlKey) {
      const keyExists = memories.find((m) => m.key === newKey);
      if (keyExists) {
        return res.status(409).json({ error: 'Memory with this key already exists.' });
      }

    if (!result.ok) {
      return res.status(500).json({ error: 'Failed to update memory.' });
      const createResult = await createMemory({
        userId: req.user.id,
        key: newKey,
        value,
        tokenCount,
      });

      if (!createResult.ok) {
        return res.status(500).json({ error: 'Failed to create new memory.' });
      }

      const deleteResult = await deleteMemory({ userId: req.user.id, key: urlKey });
      if (!deleteResult.ok) {
        return res.status(500).json({ error: 'Failed to delete old memory.' });
      }
    } else {
      // Key is not changing, just update the value
      const result = await setMemory({
        userId: req.user.id,
        key: newKey,
        value,
        tokenCount,
      });

      if (!result.ok) {
        return res.status(500).json({ error: 'Failed to update memory.' });
      }
    }

    const updatedMemories = await getAllUserMemories(req.user.id);
    const updatedMemory = updatedMemories.find((m) => m.key === key);
    const updatedMemory = updatedMemories.find((m) => m.key === newKey);

    res.json({ updated: true, memory: updatedMemory });
  } catch (error) {
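The rename path above is create-then-delete rather than an in-place key update. A hypothetical client call (the /api prefix and payload values are illustrative):

// PATCH with a body key that differs from the URL key renames the memory;
// a 409 is returned when the target key is already taken.
await fetch('/api/memories/old-key', {
  method: 'PATCH',
  headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${token}` },
  body: JSON.stringify({ key: 'new-key', value: 'Prefers dark mode' }),
});

Note that create-then-delete is not atomic: if the delete fails, both keys briefly exist, which the 500 responses surface to the caller.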
@@ -235,12 +235,13 @@ router.put('/:conversationId/:messageId', validateMessageReq, async (req, res) =
    return res.status(400).json({ error: 'Content part not found' });
  }

  if (updatedContent[index].type !== ContentTypes.TEXT) {
  const currentPartType = updatedContent[index].type;
  if (currentPartType !== ContentTypes.TEXT && currentPartType !== ContentTypes.THINK) {
    return res.status(400).json({ error: 'Cannot update non-text content' });
  }

  const oldText = updatedContent[index].text;
  updatedContent[index] = { type: ContentTypes.TEXT, text };
  const oldText = updatedContent[index][currentPartType];
  updatedContent[index] = { type: currentPartType, [currentPartType]: text };

  let tokenCount = message.tokenCount;
  if (tokenCount !== undefined) {
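The edit route now accepts reasoning ("think") parts as well as plain text, indexing each part by its own type. The content shapes this implies (illustrative values):

// A text part keeps its string under `text`, a reasoning part under `think`,
// so `updatedContent[index][currentPartType]` reads the right field either way.
const textPart = { type: ContentTypes.TEXT, text: 'Hello there' };
const thinkPart = { type: ContentTypes.THINK, think: 'Consider the user intent first.' };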
@@ -1,5 +1,7 @@
const express = require('express');
const { PermissionTypes, Permissions, SystemRoles } = require('librechat-data-provider');
const { logger } = require('@librechat/data-schemas');
const { generateCheckAccess } = require('@librechat/api');
const { Permissions, SystemRoles, PermissionTypes } = require('librechat-data-provider');
const {
  getPrompt,
  getPrompts,
@@ -14,24 +16,30 @@ const {
  // updatePromptLabels,
  makePromptProduction,
} = require('~/models/Prompt');
const { requireJwtAuth, generateCheckAccess } = require('~/server/middleware');
const { logger } = require('~/config');
const { requireJwtAuth } = require('~/server/middleware');
const { getRoleByName } = require('~/models/Role');

const router = express.Router();

const checkPromptAccess = generateCheckAccess(PermissionTypes.PROMPTS, [Permissions.USE]);
const checkPromptCreate = generateCheckAccess(PermissionTypes.PROMPTS, [
  Permissions.USE,
  Permissions.CREATE,
]);
const checkPromptAccess = generateCheckAccess({
  permissionType: PermissionTypes.PROMPTS,
  permissions: [Permissions.USE],
  getRoleByName,
});
const checkPromptCreate = generateCheckAccess({
  permissionType: PermissionTypes.PROMPTS,
  permissions: [Permissions.USE, Permissions.CREATE],
  getRoleByName,
});

const checkGlobalPromptShare = generateCheckAccess(
  PermissionTypes.PROMPTS,
  [Permissions.USE, Permissions.CREATE],
  {
const checkGlobalPromptShare = generateCheckAccess({
  permissionType: PermissionTypes.PROMPTS,
  permissions: [Permissions.USE, Permissions.CREATE],
  bodyProps: {
    [Permissions.SHARED_GLOBAL]: ['projectIds', 'removeProjectIds'],
  },
);
  getRoleByName,
});

router.use(requireJwtAuth);
router.use(checkPromptAccess);
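bodyProps extends the permission check into the request body: when the listed fields appear, the middleware is assumed to additionally require the mapped permission (here SHARED_GLOBAL). A hypothetical request that would trigger it (route path and ids are illustrative):

// Touching projectIds in the body escalates the required permissions
// from USE + CREATE to USE + CREATE + SHARED_GLOBAL.
await request(app)
  .patch('/groups/some-group-id')
  .send({ projectIds: ['global-project-id'] });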
@@ -1,8 +1,11 @@
const express = require('express');
const staticCache = require('../utils/staticCache');
const paths = require('~/config/paths');
const { isEnabled } = require('~/server/utils');

const skipGzipScan = !isEnabled(process.env.ENABLE_IMAGE_OUTPUT_GZIP_SCAN);

const router = express.Router();
router.use(staticCache(paths.imageOutput));
router.use(staticCache(paths.imageOutput, { skipGzipScan }));

module.exports = router;
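Gzip scanning of the image output directory is now opt-in. Assuming isEnabled treats the usual truthy strings ('true', '1') as enabled, the flag behaves like this (illustrative):

// ENABLE_IMAGE_OUTPUT_GZIP_SCAN unset or falsy → skipGzipScan is true → plain static serving.
// ENABLE_IMAGE_OUTPUT_GZIP_SCAN=true → skipGzipScan is false → look for .gz siblings on disk.
const skipGzipScan = !isEnabled(process.env.ENABLE_IMAGE_OUTPUT_GZIP_SCAN);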
@@ -1,18 +1,24 @@
const express = require('express');
const { logger } = require('@librechat/data-schemas');
const { generateCheckAccess } = require('@librechat/api');
const { PermissionTypes, Permissions } = require('librechat-data-provider');
const {
  getConversationTags,
  updateTagsForConversation,
  updateConversationTag,
  createConversationTag,
  deleteConversationTag,
  updateTagsForConversation,
  getConversationTags,
} = require('~/models/ConversationTag');
const { requireJwtAuth, generateCheckAccess } = require('~/server/middleware');
const { logger } = require('~/config');
const { requireJwtAuth } = require('~/server/middleware');
const { getRoleByName } = require('~/models/Role');

const router = express.Router();

const checkBookmarkAccess = generateCheckAccess(PermissionTypes.BOOKMARKS, [Permissions.USE]);
const checkBookmarkAccess = generateCheckAccess({
  permissionType: PermissionTypes.BOOKMARKS,
  permissions: [Permissions.USE],
  getRoleByName,
});

router.use(requireJwtAuth);
router.use(checkBookmarkAccess);

@@ -152,12 +152,14 @@ describe('AppService', () => {
      filteredTools: undefined,
      includedTools: undefined,
      webSearch: {
        safeSearch: 1,
        jinaApiKey: '${JINA_API_KEY}',
        cohereApiKey: '${COHERE_API_KEY}',
        serperApiKey: '${SERPER_API_KEY}',
        searxngApiKey: '${SEARXNG_API_KEY}',
        firecrawlApiKey: '${FIRECRAWL_API_KEY}',
        firecrawlApiUrl: '${FIRECRAWL_API_URL}',
        jinaApiKey: '${JINA_API_KEY}',
        safeSearch: 1,
        serperApiKey: '${SERPER_API_KEY}',
        searxngInstanceUrl: '${SEARXNG_INSTANCE_URL}',
      },
      memory: undefined,
      agents: {

@@ -1,4 +1,7 @@
const { klona } = require('klona');
const { sleep } = require('@librechat/agents');
const { sendEvent } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const {
  StepTypes,
  RunStatus,
@@ -11,11 +14,10 @@ const {
} = require('librechat-data-provider');
const { retrieveAndProcessFile } = require('~/server/services/Files/process');
const { processRequiredActions } = require('~/server/services/ToolService');
const { createOnProgress, sendMessage, sleep } = require('~/server/utils');
const { RunManager, waitForRun } = require('~/server/services/Runs');
const { processMessages } = require('~/server/services/Threads');
const { createOnProgress } = require('~/server/utils');
const { TextStream } = require('~/app/clients');
const { logger } = require('~/config');

/**
 * Sorts, processes, and flattens messages to a single string.
@@ -64,7 +66,7 @@ async function createOnTextProgress({
  };

  logger.debug('Content data:', contentData);
  sendMessage(openai.res, contentData);
  sendEvent(openai.res, contentData);
};
}
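Several call sites in this changeset swap the local sendMessage utility for sendEvent from @librechat/api. Assuming both write server-sent events, the shared helper is probably close to this minimal SSE frame (a sketch, not the package's actual code):

function sendEventSketch(res, data) {
  // One SSE frame: an event name line, a JSON data line, then a blank line.
  res.write(`event: message\ndata: ${JSON.stringify(data)}\n\n`);
}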
@@ -1,4 +1,5 @@
const bcrypt = require('bcryptjs');
const jwt = require('jsonwebtoken');
const { webcrypto } = require('node:crypto');
const { isEnabled } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
@@ -499,6 +500,18 @@ const resendVerificationEmail = async (req) => {
    };
  }
};
/**
 * Generate a short-lived JWT token
 * @param {String} userId - The ID of the user
 * @param {String} [expireIn='5m'] - The expiration time for the token (default is 5 minutes)
 * @returns {String} - The generated JWT token
 */
const generateShortLivedToken = (userId, expireIn = '5m') => {
  return jwt.sign({ id: userId }, process.env.JWT_SECRET, {
    expiresIn: expireIn,
    algorithm: 'HS256',
  });
};

module.exports = {
  logoutUser,
@@ -506,7 +519,8 @@ module.exports = {
  registerUser,
  setAuthTokens,
  resetPassword,
  setOpenIDAuthTokens,
  requestPasswordReset,
  resendVerificationEmail,
  setOpenIDAuthTokens,
  generateShortLivedToken,
};
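generateShortLivedToken replaces forwarding the user's own session JWT to the RAG API (see the Files strategy hunks later in this diff). Verification on the receiving side is symmetric, for example:

// The token carries only the user id and expires in 5 minutes by default;
// any holder of JWT_SECRET can verify it.
const token = generateShortLivedToken('user-123');
const payload = jwt.verify(token, process.env.JWT_SECRET, { algorithms: ['HS256'] });
console.log(payload.id); // 'user-123'

Minting a scoped, short-lived token means background deletions no longer depend on a request's Authorization header being present.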
@@ -1,5 +1,6 @@
const { isUserProvided } = require('@librechat/api');
const { EModelEndpoint } = require('librechat-data-provider');
const { isUserProvided, generateConfig } = require('~/server/utils');
const { generateConfig } = require('~/server/utils/handleText');

const {
  OPENAI_API_KEY: openAIApiKey,

@@ -1,10 +1,9 @@
const { logger } = require('@librechat/data-schemas');
const { getUserMCPAuthMap } = require('@librechat/api');
const { isEnabled, getUserMCPAuthMap } = require('@librechat/api');
const { CacheKeys, EModelEndpoint } = require('librechat-data-provider');
const { normalizeEndpointName, isEnabled } = require('~/server/utils');
const { normalizeEndpointName } = require('~/server/utils');
const loadCustomConfig = require('./loadCustomConfig');
const { getCachedTools } = require('./getCachedTools');
const { findPluginAuthsByKeys } = require('~/models');
const getLogStores = require('~/cache/getLogStores');

/**
@@ -55,46 +54,48 @@ const getCustomEndpointConfig = async (endpoint) => {
  );
};

async function createGetMCPAuthMap() {
/**
 * @param {Object} params
 * @param {string} params.userId
 * @param {GenericTool[]} [params.tools]
 * @param {import('@librechat/data-schemas').PluginAuthMethods['findPluginAuthsByKeys']} params.findPluginAuthsByKeys
 * @returns {Promise<Record<string, Record<string, string>> | undefined>}
 */
async function getMCPAuthMap({ userId, tools, findPluginAuthsByKeys }) {
  try {
    if (!tools || tools.length === 0) {
      return;
    }
    const appTools = await getCachedTools({
      userId,
    });
    return await getUserMCPAuthMap({
      tools,
      userId,
      appTools,
      findPluginAuthsByKeys,
    });
  } catch (err) {
    logger.error(
      `[api/server/controllers/agents/client.js #chatCompletion] Error getting custom user vars for agent`,
      err,
    );
  }
}

/**
 * @returns {Promise<boolean>}
 */
async function hasCustomUserVars() {
  const customConfig = await getCustomConfig();
  const mcpServers = customConfig?.mcpServers;
  const hasCustomUserVars = Object.values(mcpServers ?? {}).some((server) => server.customUserVars);
  if (!hasCustomUserVars) {
    return;
  }

  /**
   * @param {Object} params
   * @param {GenericTool[]} [params.tools]
   * @param {string} params.userId
   * @returns {Promise<Record<string, Record<string, string>> | undefined>}
   */
  return async function ({ tools, userId }) {
    try {
      if (!tools || tools.length === 0) {
        return;
      }
      const appTools = await getCachedTools({
        userId,
      });
      return await getUserMCPAuthMap({
        tools,
        userId,
        appTools,
        findPluginAuthsByKeys,
      });
    } catch (err) {
      logger.error(
        `[api/server/controllers/agents/client.js #chatCompletion] Error getting custom user vars for agent`,
        err,
      );
    }
  };
  return Object.values(mcpServers ?? {}).some((server) => server.customUserVars);
}

module.exports = {
  getMCPAuthMap,
  getCustomConfig,
  getBalanceConfig,
  createGetMCPAuthMap,
  hasCustomUserVars,
  getCustomEndpointConfig,
};
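The factory createGetMCPAuthMap (which captured findPluginAuthsByKeys and a config check in a closure) becomes a plain getMCPAuthMap that takes its dependencies as parameters, with the config check split out into hasCustomUserVars. A hypothetical call site under the new shape:

// The caller decides whether custom user vars are even configured,
// then injects the persistence function explicitly.
if (await hasCustomUserVars()) {
  const authMap = await getMCPAuthMap({
    userId: req.user.id,
    tools: agent.tools,
    findPluginAuthsByKeys,
  });
}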
@@ -1,4 +1,10 @@
const { CacheKeys, EModelEndpoint, orderEndpointsConfig } = require('librechat-data-provider');
const {
  CacheKeys,
  EModelEndpoint,
  isAgentsEndpoint,
  orderEndpointsConfig,
  defaultAgentCapabilities,
} = require('librechat-data-provider');
const loadDefaultEndpointsConfig = require('./loadDefaultEConfig');
const loadConfigEndpoints = require('./loadConfigEndpoints');
const getLogStores = require('~/cache/getLogStores');
@@ -80,8 +86,12 @@ async function getEndpointsConfig(req) {
 * @returns {Promise<boolean>}
 */
const checkCapability = async (req, capability) => {
  const isAgents = isAgentsEndpoint(req.body?.original_endpoint || req.body?.endpoint);
  const endpointsConfig = await getEndpointsConfig(req);
  const capabilities = endpointsConfig?.[EModelEndpoint.agents]?.capabilities ?? [];
  const capabilities =
    isAgents || endpointsConfig?.[EModelEndpoint.agents]?.capabilities != null
      ? (endpointsConfig?.[EModelEndpoint.agents]?.capabilities ?? [])
      : defaultAgentCapabilities;
  return capabilities.includes(capability);
};
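Read as a decision table, the fallback works like this: an agents-endpoint request, or any deployment that explicitly configured agent capabilities, uses the configured list; every other case now gets defaultAgentCapabilities instead of an empty array. Illustrative check (the capability string is an assumption):

// Previously this returned false on deployments without an agents config;
// with the fallback it reflects the library defaults.
const canSearchFiles = await checkCapability(req, 'file_search');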
@@ -1,5 +1,7 @@
const path = require('path');
const { logger } = require('@librechat/data-schemas');
const { loadServiceKey, isUserProvided } = require('@librechat/api');
const { EModelEndpoint } = require('librechat-data-provider');
const { isUserProvided } = require('~/server/utils');
const { config } = require('./EndpointService');

const { openAIApiKey, azureOpenAIApiKey, useAzurePlugins, userProvidedOpenAI, googleKey } = config;
@@ -9,37 +11,41 @@ const { openAIApiKey, azureOpenAIApiKey, useAzurePlugins, userProvidedOpenAI, go
 * @param {Express.Request} req - The request object
 */
async function loadAsyncEndpoints(req) {
  let i = 0;
  let serviceKey, googleUserProvides;
  try {
    serviceKey = require('~/data/auth.json');
  } catch (e) {
    if (i === 0) {
      i++;

  /** Check if GOOGLE_KEY is provided at all (including 'user_provided') */
  const isGoogleKeyProvided = googleKey && googleKey.trim() !== '';

  if (isGoogleKeyProvided) {
    /** If GOOGLE_KEY is provided, check if it's user_provided */
    googleUserProvides = isUserProvided(googleKey);
  } else {
    /** Only attempt to load service key if GOOGLE_KEY is not provided */
    const serviceKeyPath =
      process.env.GOOGLE_SERVICE_KEY_FILE || path.join(__dirname, '../../..', 'data', 'auth.json');

    try {
      serviceKey = await loadServiceKey(serviceKeyPath);
    } catch (error) {
      logger.error('Error loading service key', error);
      serviceKey = null;
    }
  }

  if (isUserProvided(googleKey)) {
    googleUserProvides = true;
    if (i <= 1) {
      i++;
    }
  }

  const google = serviceKey || googleKey ? { userProvide: googleUserProvides } : false;
  const google = serviceKey || isGoogleKeyProvided ? { userProvide: googleUserProvides } : false;

  const useAzure = req.app.locals[EModelEndpoint.azureOpenAI]?.plugins;
  const gptPlugins =
    useAzure || openAIApiKey || azureOpenAIApiKey
      ? {
        availableAgents: ['classic', 'functions'],
        userProvide: useAzure ? false : userProvidedOpenAI,
        userProvideURL: useAzure
          ? false
          : config[EModelEndpoint.openAI]?.userProvideURL ||
          availableAgents: ['classic', 'functions'],
          userProvide: useAzure ? false : userProvidedOpenAI,
          userProvideURL: useAzure
            ? false
            : config[EModelEndpoint.openAI]?.userProvideURL ||
              config[EModelEndpoint.azureOpenAI]?.userProvideURL,
          azure: useAzurePlugins || useAzure,
        }
        azure: useAzurePlugins || useAzure,
      }
      : false;

  return { google, gptPlugins };
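The key-resolution order this hunk establishes, as read from the new code (a summary sketch, not additional behavior):

// 1. GOOGLE_KEY set (including 'user_provided') → use it; skip service-key loading.
// 2. GOOGLE_KEY unset → loadServiceKey(GOOGLE_SERVICE_KEY_FILE || <repo>/data/auth.json).
// 3. The google endpoint is enabled when either credential source succeeded.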
@@ -1,18 +1,18 @@
const path = require('path');
const {
  CacheKeys,
  configSchema,
  EImageOutputType,
  validateSettingDefinitions,
  agentParamSettings,
  paramSettings,
} = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores');
const loadYaml = require('~/utils/loadYaml');
const { logger } = require('~/config');
const axios = require('axios');
const yaml = require('js-yaml');
const keyBy = require('lodash/keyBy');
const { loadYaml } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const {
  CacheKeys,
  configSchema,
  paramSettings,
  EImageOutputType,
  agentParamSettings,
  validateSettingDefinitions,
} = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores');

const projectRoot = path.resolve(__dirname, '..', '..', '..', '..');
const defaultConfigPath = path.resolve(projectRoot, 'librechat.yaml');

@@ -1,6 +1,9 @@
jest.mock('axios');
jest.mock('~/cache/getLogStores');
jest.mock('~/utils/loadYaml');
jest.mock('@librechat/api', () => ({
  ...jest.requireActual('@librechat/api'),
  loadYaml: jest.fn(),
}));
jest.mock('librechat-data-provider', () => {
  const actual = jest.requireActual('librechat-data-provider');
  return {
@@ -30,11 +33,22 @@ jest.mock('librechat-data-provider', () => {
  };
});

jest.mock('@librechat/data-schemas', () => {
  return {
    logger: {
      info: jest.fn(),
      warn: jest.fn(),
      debug: jest.fn(),
      error: jest.fn(),
    },
  };
});

const axios = require('axios');
const { loadYaml } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const loadCustomConfig = require('./loadCustomConfig');
const getLogStores = require('~/cache/getLogStores');
const loadYaml = require('~/utils/loadYaml');
const { logger } = require('~/config');

describe('loadCustomConfig', () => {
  const mockSet = jest.fn();

@@ -11,8 +11,8 @@ const {
  replaceSpecialVars,
  providerEndpointMap,
} = require('librechat-data-provider');
const { getProviderConfig } = require('~/server/services/Endpoints');
const generateArtifactsPrompt = require('~/app/clients/prompts/artifacts');
const { getProviderConfig } = require('~/server/services/Endpoints');
const { processFiles } = require('~/server/services/Files/process');
const { getFiles, getToolFilesByIds } = require('~/models/File');
const { getConvoFiles } = require('~/models/Conversation');
@@ -82,10 +82,11 @@ const initializeAgent = async ({
    attachments: currentFiles,
    tool_resources: agent.tool_resources,
    requestFileSet: new Set(requestFiles?.map((file) => file.file_id)),
    agentId: agent.id,
  });

  const provider = agent.provider;
  const { tools, toolContextMap } =
  const { tools: structuredTools, toolContextMap } =
    (await loadTools?.({
      req,
      res,
@@ -140,6 +141,24 @@ const initializeAgent = async ({
    agent.provider = options.provider;
  }

  /** @type {import('@librechat/agents').GenericTool[]} */
  let tools = options.tools?.length ? options.tools : structuredTools;
  if (
    (agent.provider === Providers.GOOGLE || agent.provider === Providers.VERTEXAI) &&
    options.tools?.length &&
    structuredTools?.length
  ) {
    throw new Error(`{ "type": "${ErrorTypes.GOOGLE_TOOL_CONFLICT}"}`);
  } else if (
    (agent.provider === Providers.OPENAI ||
      agent.provider === Providers.AZURE ||
      agent.provider === Providers.ANTHROPIC) &&
    options.tools?.length &&
    structuredTools?.length
  ) {
    tools = structuredTools.concat(options.tools);
  }

  /** @type {import('@librechat/agents').ClientOptions} */
  agent.model_parameters = { ...options.llmConfig };
  if (options.configOptions) {
@@ -162,10 +181,10 @@ const initializeAgent = async ({

  return {
    ...agent,
    tools,
    attachments,
    resendFiles,
    toolContextMap,
    tools,
    maxContextTokens: (agentMaxContextTokens - maxTokens) * 0.9,
  };
};
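The provider rules this block encodes, condensed into a standalone sketch (provider string values are illustrative stand-ins for the Providers enum):

function resolveAgentTools(provider, builtInTools = [], structuredTools = []) {
  const hasBoth = builtInTools.length > 0 && structuredTools.length > 0;
  if (hasBoth && (provider === 'google' || provider === 'vertexai')) {
    // Google providers reject requests that mix built-in and function tools.
    throw new Error('GOOGLE_TOOL_CONFLICT');
  }
  if (hasBoth && ['openai', 'azure', 'anthropic'].includes(provider)) {
    return structuredTools.concat(builtInTools); // these providers accept both at once
  }
  return builtInTools.length > 0 ? builtInTools : structuredTools;
}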
@@ -78,7 +78,17 @@ function getLLMConfig(apiKey, options = {}) {
    requestOptions.anthropicApiUrl = options.reverseProxyUrl;
  }

  const tools = [];

  if (mergedOptions.web_search) {
    tools.push({
      type: 'web_search_20250305',
      name: 'web_search',
    });
  }

  return {
    tools,
    /** @type {AnthropicClientOptions} */
    llmConfig: removeNullishValues(requestOptions),
  };
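getLLMConfig now surfaces Anthropic's server-side web search as a tool entry whenever web_search is set. A hypothetical call (the model name and option shape are assumptions inferred from context):

const { tools, llmConfig } = getLLMConfig(process.env.ANTHROPIC_API_KEY, {
  modelOptions: { model: 'claude-3-7-sonnet-latest', web_search: true },
});
// tools → [{ type: 'web_search_20250305', name: 'web_search' }]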
93  api/server/services/Endpoints/custom/initialize.spec.js  Normal file
@@ -0,0 +1,93 @@
const initializeClient = require('./initialize');

jest.mock('@librechat/api', () => ({
  resolveHeaders: jest.fn(),
  getOpenAIConfig: jest.fn(),
  createHandleLLMNewToken: jest.fn(),
}));

jest.mock('librechat-data-provider', () => ({
  CacheKeys: { TOKEN_CONFIG: 'token_config' },
  ErrorTypes: { NO_USER_KEY: 'NO_USER_KEY', NO_BASE_URL: 'NO_BASE_URL' },
  envVarRegex: /\$\{([^}]+)\}/,
  FetchTokenConfig: {},
  extractEnvVariable: jest.fn((value) => value),
}));

jest.mock('@librechat/agents', () => ({
  Providers: { OLLAMA: 'ollama' },
}));

jest.mock('~/server/services/UserService', () => ({
  getUserKeyValues: jest.fn(),
  checkUserKeyExpiry: jest.fn(),
}));

jest.mock('~/server/services/Config', () => ({
  getCustomEndpointConfig: jest.fn().mockResolvedValue({
    apiKey: 'test-key',
    baseURL: 'https://test.com',
    headers: { 'x-user': '{{LIBRECHAT_USER_ID}}', 'x-email': '{{LIBRECHAT_USER_EMAIL}}' },
    models: { default: ['test-model'] },
  }),
}));

jest.mock('~/server/services/ModelService', () => ({
  fetchModels: jest.fn(),
}));

jest.mock('~/app/clients/OpenAIClient', () => {
  return jest.fn().mockImplementation(() => ({
    options: {},
  }));
});

jest.mock('~/server/utils', () => ({
  isUserProvided: jest.fn().mockReturnValue(false),
}));

jest.mock('~/cache/getLogStores', () =>
  jest.fn().mockReturnValue({
    get: jest.fn(),
  }),
);

describe('custom/initializeClient', () => {
  const mockRequest = {
    body: { endpoint: 'test-endpoint' },
    user: { id: 'user-123', email: 'test@example.com' },
    app: { locals: {} },
  };
  const mockResponse = {};

  beforeEach(() => {
    jest.clearAllMocks();
  });

  it('calls resolveHeaders with headers and user', async () => {
    const { resolveHeaders } = require('@librechat/api');
    await initializeClient({ req: mockRequest, res: mockResponse, optionsOnly: true });
    expect(resolveHeaders).toHaveBeenCalledWith(
      { 'x-user': '{{LIBRECHAT_USER_ID}}', 'x-email': '{{LIBRECHAT_USER_EMAIL}}' },
      { id: 'user-123', email: 'test@example.com' },
    );
  });

  it('throws if endpoint config is missing', async () => {
    const { getCustomEndpointConfig } = require('~/server/services/Config');
    getCustomEndpointConfig.mockResolvedValueOnce(null);
    await expect(
      initializeClient({ req: mockRequest, res: mockResponse, optionsOnly: true }),
    ).rejects.toThrow('Config not found for the test-endpoint custom endpoint.');
  });

  it('throws if user is missing', async () => {
    await expect(
      initializeClient({
        req: { ...mockRequest, user: undefined },
        res: mockResponse,
        optionsOnly: true,
      }),
    ).rejects.toThrow("Cannot read properties of undefined (reading 'id')");
  });
});

@@ -1,7 +1,7 @@
const path = require('path');
const { EModelEndpoint, AuthKeys } = require('librechat-data-provider');
const { getGoogleConfig, isEnabled, loadServiceKey } = require('@librechat/api');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
const { getLLMConfig } = require('~/server/services/Endpoints/google/llm');
const { isEnabled } = require('~/server/utils');
const { GoogleClient } = require('~/app');

const initializeClient = async ({ req, res, endpointOption, overrideModel, optionsOnly }) => {
@@ -16,10 +16,25 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio
  }

  let serviceKey = {};
  try {
    serviceKey = require('~/data/auth.json');
  } catch (_e) {
    // Do nothing

  /** Check if GOOGLE_KEY is provided at all (including 'user_provided') */
  const isGoogleKeyProvided =
    (GOOGLE_KEY && GOOGLE_KEY.trim() !== '') || (isUserProvided && userKey != null);

  if (!isGoogleKeyProvided) {
    /** Only attempt to load service key if GOOGLE_KEY is not provided */
    try {
      const serviceKeyPath =
        process.env.GOOGLE_SERVICE_KEY_FILE ||
        path.join(__dirname, '../../../..', 'data', 'auth.json');
      serviceKey = await loadServiceKey(serviceKeyPath);
      if (!serviceKey) {
        serviceKey = {};
      }
    } catch (_e) {
      // Service key loading failed, but that's okay if not required
      serviceKey = {};
    }
  }

  const credentials = isUserProvided
@@ -65,7 +80,7 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio
    if (overrideModel) {
      clientOptions.modelOptions.model = overrideModel;
    }
    return getLLMConfig(credentials, clientOptions);
    return getGoogleConfig(credentials, clientOptions);
  }

  const client = new GoogleClient(credentials, clientOptions);

@@ -7,6 +7,16 @@ const initCustom = require('~/server/services/Endpoints/custom/initialize');
const initGoogle = require('~/server/services/Endpoints/google/initialize');
const { getCustomEndpointConfig } = require('~/server/services/Config');

/** Check if the provider is a known custom provider
 * @param {string | undefined} [provider] - The provider string
 * @returns {boolean} - True if the provider is a known custom provider, false otherwise
 */
function isKnownCustomProvider(provider) {
  return [Providers.XAI, Providers.OLLAMA, Providers.DEEPSEEK, Providers.OPENROUTER].includes(
    provider?.toLowerCase() || '',
  );
}

const providerConfigMap = {
  [Providers.XAI]: initCustom,
  [Providers.OLLAMA]: initCustom,
@@ -46,6 +56,13 @@ async function getProviderConfig(provider) {
    overrideProvider = Providers.OPENAI;
  }

  if (isKnownCustomProvider(overrideProvider || provider) && !customEndpointConfig) {
    customEndpointConfig = await getCustomEndpointConfig(provider);
    if (!customEndpointConfig) {
      throw new Error(`Provider ${provider} not supported`);
    }
  }

  return {
    getOptions,
    overrideProvider,
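A quick read of the guard added above (return values follow from the lowercase Providers enum strings; illustrative):

isKnownCustomProvider('ollama');     // true — routed through initCustom
isKnownCustomProvider('openrouter'); // true
isKnownCustomProvider('anthropic');  // false — falls through to its dedicated initializer

The follow-up check then fails fast with `Provider ${provider} not supported` when a known custom provider has no matching endpoint entry in the custom config.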
@@ -152,6 +152,7 @@ async function getSessionInfo(fileIdentifier, apiKey) {
 * @param {Object} options
 * @param {ServerRequest} options.req
 * @param {Agent['tool_resources']} options.tool_resources
 * @param {string} [options.agentId] - The agent ID for file access control
 * @param {string} apiKey
 * @returns {Promise<{
 *  files: Array<{ id: string; session_id: string; name: string }>,
@@ -159,11 +160,18 @@ async function getSessionInfo(fileIdentifier, apiKey) {
 * }>}
 */
const primeFiles = async (options, apiKey) => {
  const { tool_resources } = options;
  const { tool_resources, req, agentId } = options;
  const file_ids = tool_resources?.[EToolResources.execute_code]?.file_ids ?? [];
  const agentResourceIds = new Set(file_ids);
  const resourceFiles = tool_resources?.[EToolResources.execute_code]?.files ?? [];
  const dbFiles = ((await getFiles({ file_id: { $in: file_ids } })) ?? []).concat(resourceFiles);
  const dbFiles = (
    (await getFiles(
      { file_id: { $in: file_ids } },
      null,
      { text: 0 },
      { userId: req?.user?.id, agentId },
    )) ?? []
  ).concat(resourceFiles);

  const files = [];
  const sessions = new Map();

@@ -1,10 +1,11 @@
const fs = require('fs');
const path = require('path');
const axios = require('axios');
const { logger } = require('@librechat/data-schemas');
const { EModelEndpoint } = require('librechat-data-provider');
const { generateShortLivedToken } = require('~/server/services/AuthService');
const { getBufferMetadata } = require('~/server/utils');
const paths = require('~/config/paths');
const { logger } = require('~/config');

/**
 * Saves a file to a specified output path with a new filename.
@@ -206,7 +207,7 @@ const deleteLocalFile = async (req, file) => {
  const cleanFilepath = file.filepath.split('?')[0];

  if (file.embedded && process.env.RAG_API_URL) {
    const jwtToken = req.headers.authorization.split(' ')[1];
    const jwtToken = generateShortLivedToken(req.user.id);
    axios.delete(`${process.env.RAG_API_URL}/documents`, {
      headers: {
        Authorization: `Bearer ${jwtToken}`,

@@ -4,6 +4,7 @@ const FormData = require('form-data');
const { logAxiosError } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { FileSources } = require('librechat-data-provider');
const { generateShortLivedToken } = require('~/server/services/AuthService');

/**
 * Deletes a file from the vector database. This function takes a file object, constructs the full path, and
@@ -23,7 +24,8 @@ const deleteVectors = async (req, file) => {
    return;
  }
  try {
    const jwtToken = req.headers.authorization.split(' ')[1];
    const jwtToken = generateShortLivedToken(req.user.id);

    return await axios.delete(`${process.env.RAG_API_URL}/documents`, {
      headers: {
        Authorization: `Bearer ${jwtToken}`,
@@ -70,7 +72,7 @@ async function uploadVectors({ req, file, file_id, entity_id }) {
  }

  try {
    const jwtToken = req.headers.authorization.split(' ')[1];
    const jwtToken = generateShortLivedToken(req.user.id);
    const formData = new FormData();
    formData.append('file_id', file_id);
    formData.append('file', fs.createReadStream(file.path));

@@ -55,7 +55,9 @@ const processFiles = async (files, fileIds) => {
  }

  if (!fileIds) {
    return await Promise.all(promises);
    const results = await Promise.all(promises);
    // Filter out null results from failed updateFileUsage calls
    return results.filter((result) => result != null);
  }

  for (let file_id of fileIds) {
@@ -67,7 +69,9 @@ const processFiles = async (files, fileIds) => {
  }

  // TODO: calculate token cost when image is first uploaded
  return await Promise.all(promises);
  const results = await Promise.all(promises);
  // Filter out null results from failed updateFileUsage calls
  return results.filter((result) => result != null);
};
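Net effect of the processFiles change, which the new test file below also exercises: ids that no longer resolve to a database record yield null from updateFileUsage and are dropped instead of leaking into the response. Illustrative before/after:

const results = await processFiles([{ file_id: 'kept' }, { file_id: 'missing' }]);
// before: [{ file_id: 'kept', usage: 1 }, null]
// after:  [{ file_id: 'kept', usage: 1 }]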
/**

208  api/server/services/Files/processFiles.test.js  Normal file
@@ -0,0 +1,208 @@
// Mock the updateFileUsage function before importing the actual processFiles
jest.mock('~/models/File', () => ({
  updateFileUsage: jest.fn(),
}));

// Mock winston and logger configuration to avoid dependency issues
jest.mock('~/config', () => ({
  logger: {
    info: jest.fn(),
    warn: jest.fn(),
    debug: jest.fn(),
    error: jest.fn(),
  },
}));

// Mock all other dependencies that might cause issues
jest.mock('librechat-data-provider', () => ({
  isUUID: { parse: jest.fn() },
  megabyte: 1024 * 1024,
  FileContext: { message_attachment: 'message_attachment' },
  FileSources: { local: 'local' },
  EModelEndpoint: { assistants: 'assistants' },
  EToolResources: { file_search: 'file_search' },
  mergeFileConfig: jest.fn(),
  removeNullishValues: jest.fn((obj) => obj),
  isAssistantsEndpoint: jest.fn(),
}));

jest.mock('~/server/services/Files/images', () => ({
  convertImage: jest.fn(),
  resizeAndConvert: jest.fn(),
  resizeImageBuffer: jest.fn(),
}));

jest.mock('~/server/controllers/assistants/v2', () => ({
  addResourceFileId: jest.fn(),
  deleteResourceFileId: jest.fn(),
}));

jest.mock('~/models/Agent', () => ({
  addAgentResourceFile: jest.fn(),
  removeAgentResourceFiles: jest.fn(),
}));

jest.mock('~/server/controllers/assistants/helpers', () => ({
  getOpenAIClient: jest.fn(),
}));

jest.mock('~/server/services/Tools/credentials', () => ({
  loadAuthValues: jest.fn(),
}));

jest.mock('~/server/services/Config', () => ({
  checkCapability: jest.fn(),
}));

jest.mock('~/server/utils/queue', () => ({
  LB_QueueAsyncCall: jest.fn(),
}));

jest.mock('./strategies', () => ({
  getStrategyFunctions: jest.fn(),
}));

jest.mock('~/server/utils', () => ({
  determineFileType: jest.fn(),
}));

// Import the actual processFiles function after all mocks are set up
const { processFiles } = require('./process');
const { updateFileUsage } = require('~/models/File');

describe('processFiles', () => {
  beforeEach(() => {
    jest.clearAllMocks();
  });

  describe('null filtering functionality', () => {
    it('should filter out null results from updateFileUsage when files do not exist', async () => {
      const mockFiles = [
        { file_id: 'existing-file-1' },
        { file_id: 'non-existent-file' },
        { file_id: 'existing-file-2' },
      ];

      // Mock updateFileUsage to return null for non-existent files
      updateFileUsage.mockImplementation(({ file_id }) => {
        if (file_id === 'non-existent-file') {
          return Promise.resolve(null); // Simulate file not found in the database
        }
        return Promise.resolve({ file_id, usage: 1 });
      });

      const result = await processFiles(mockFiles);

      expect(updateFileUsage).toHaveBeenCalledTimes(3);
      expect(result).toEqual([
        { file_id: 'existing-file-1', usage: 1 },
        { file_id: 'existing-file-2', usage: 1 },
      ]);

      // Critical test - ensure no null values in result
      expect(result).not.toContain(null);
      expect(result).not.toContain(undefined);
      expect(result.length).toBe(2); // Only valid files should be returned
    });

    it('should return empty array when all updateFileUsage calls return null', async () => {
      const mockFiles = [{ file_id: 'non-existent-1' }, { file_id: 'non-existent-2' }];

      // All updateFileUsage calls return null
      updateFileUsage.mockResolvedValue(null);

      const result = await processFiles(mockFiles);

      expect(updateFileUsage).toHaveBeenCalledTimes(2);
      expect(result).toEqual([]);
      expect(result).not.toContain(null);
      expect(result.length).toBe(0);
    });

    it('should work correctly when all files exist', async () => {
      const mockFiles = [{ file_id: 'file-1' }, { file_id: 'file-2' }];

      updateFileUsage.mockImplementation(({ file_id }) => {
        return Promise.resolve({ file_id, usage: 1 });
      });

      const result = await processFiles(mockFiles);

      expect(result).toEqual([
        { file_id: 'file-1', usage: 1 },
        { file_id: 'file-2', usage: 1 },
      ]);
      expect(result).not.toContain(null);
      expect(result.length).toBe(2);
    });

    it('should handle fileIds parameter and filter nulls correctly', async () => {
      const mockFiles = [{ file_id: 'file-1' }];
      const mockFileIds = ['file-2', 'non-existent-file'];

      updateFileUsage.mockImplementation(({ file_id }) => {
        if (file_id === 'non-existent-file') {
          return Promise.resolve(null);
        }
        return Promise.resolve({ file_id, usage: 1 });
      });

      const result = await processFiles(mockFiles, mockFileIds);

      expect(result).toEqual([
        { file_id: 'file-1', usage: 1 },
        { file_id: 'file-2', usage: 1 },
      ]);
      expect(result).not.toContain(null);
      expect(result).not.toContain(undefined);
      expect(result.length).toBe(2);
    });

    it('should handle duplicate file_ids correctly', async () => {
      const mockFiles = [
        { file_id: 'duplicate-file' },
        { file_id: 'duplicate-file' }, // Duplicate should be ignored
        { file_id: 'unique-file' },
      ];

      updateFileUsage.mockImplementation(({ file_id }) => {
        return Promise.resolve({ file_id, usage: 1 });
      });

      const result = await processFiles(mockFiles);

      // Should only call updateFileUsage twice (duplicate ignored)
      expect(updateFileUsage).toHaveBeenCalledTimes(2);
      expect(result).toEqual([
        { file_id: 'duplicate-file', usage: 1 },
        { file_id: 'unique-file', usage: 1 },
      ]);
      expect(result.length).toBe(2);
    });
  });

  describe('edge cases', () => {
    it('should handle empty files array', async () => {
      const result = await processFiles([]);
      expect(result).toEqual([]);
      expect(updateFileUsage).not.toHaveBeenCalled();
    });

    it('should handle mixed null and undefined returns from updateFileUsage', async () => {
      const mockFiles = [{ file_id: 'file-1' }, { file_id: 'file-2' }, { file_id: 'file-3' }];

      updateFileUsage.mockImplementation(({ file_id }) => {
        if (file_id === 'file-1') return Promise.resolve(null);
        if (file_id === 'file-2') return Promise.resolve(undefined);
        return Promise.resolve({ file_id, usage: 1 });
      });

      const result = await processFiles(mockFiles);

      expect(result).toEqual([{ file_id: 'file-3', usage: 1 }]);
      expect(result).not.toContain(null);
      expect(result).not.toContain(undefined);
      expect(result.length).toBe(1);
    });
  });
});

@@ -1,5 +1,9 @@
const { FileSources } = require('librechat-data-provider');
const { uploadMistralOCR, uploadAzureMistralOCR } = require('@librechat/api');
const {
  uploadMistralOCR,
  uploadAzureMistralOCR,
  uploadGoogleVertexMistralOCR,
} = require('@librechat/api');
const {
  getFirebaseURL,
  prepareImageURL,
@@ -222,6 +226,26 @@ const azureMistralOCRStrategy = () => ({
  handleFileUpload: uploadAzureMistralOCR,
});

const vertexMistralOCRStrategy = () => ({
  /** @type {typeof saveFileFromURL | null} */
  saveURL: null,
  /** @type {typeof getLocalFileURL | null} */
  getFileURL: null,
  /** @type {typeof saveLocalBuffer | null} */
  saveBuffer: null,
  /** @type {typeof processLocalAvatar | null} */
  processAvatar: null,
  /** @type {typeof uploadLocalImage | null} */
  handleImageUpload: null,
  /** @type {typeof prepareImagesLocal | null} */
  prepareImagePayload: null,
  /** @type {typeof deleteLocalFile | null} */
  deleteFile: null,
  /** @type {typeof getLocalFileStream | null} */
  getDownloadStream: null,
  handleFileUpload: uploadGoogleVertexMistralOCR,
});

// Strategy Selector
const getStrategyFunctions = (fileSource) => {
  if (fileSource === FileSources.firebase) {
@@ -244,6 +268,8 @@ const getStrategyFunctions = (fileSource) => {
    return mistralOCRStrategy();
  } else if (fileSource === FileSources.azure_mistral_ocr) {
    return azureMistralOCRStrategy();
  } else if (fileSource === FileSources.vertexai_mistral_ocr) {
    return vertexMistralOCRStrategy();
  } else {
    throw new Error('Invalid file source');
  }

@@ -2,16 +2,16 @@ const { z } = require('zod');
const { tool } = require('@langchain/core/tools');
const { logger } = require('@librechat/data-schemas');
const { Time, CacheKeys, StepTypes } = require('librechat-data-provider');
const { sendEvent, normalizeServerName, MCPOAuthHandler } = require('@librechat/api');
const { Constants: AgentConstants, Providers, GraphEvents } = require('@librechat/agents');
const { Constants, ContentTypes, isAssistantsEndpoint } = require('librechat-data-provider');
const {
  Constants,
  ContentTypes,
  isAssistantsEndpoint,
  convertJsonSchemaToZod,
} = require('librechat-data-provider');
const { getMCPManager, getFlowStateManager } = require('~/config');
  sendEvent,
  MCPOAuthHandler,
  normalizeServerName,
  convertWithResolvedRefs,
} = require('@librechat/api');
const { findToken, createToken, updateToken } = require('~/models');
const { getMCPManager, getFlowStateManager } = require('~/config');
const { getCachedTools } = require('./Config');
const { getLogStores } = require('~/cache');

@@ -113,7 +113,7 @@ async function createMCPTool({ req, res, toolKey, provider: _provider }) {
  /** @type {LCTool} */
  const { description, parameters } = toolDefinition;
  const isGoogle = _provider === Providers.VERTEXAI || _provider === Providers.GOOGLE;
  let schema = convertJsonSchemaToZod(parameters, {
  let schema = convertWithResolvedRefs(parameters, {
    allowEmptyObject: !isGoogle,
    transformOneOfAnyOf: true,
  });
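convertWithResolvedRefs is assumed to behave like convertJsonSchemaToZod while first resolving local $ref pointers, so MCP tool schemas that share definitions still convert cleanly. Illustrative input under that assumption:

const schema = convertWithResolvedRefs(
  {
    type: 'object',
    properties: { query: { $ref: '#/definitions/q' } },
    definitions: { q: { type: 'string' } },
  },
  { allowEmptyObject: true, transformOneOfAnyOf: true },
);
// Equivalent result to converting { query: { type: 'string' } } directly.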
@@ -1,3 +1,6 @@
const { sleep } = require('@librechat/agents');
const { sendEvent } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const {
  Constants,
  StepTypes,
@@ -8,9 +11,8 @@ const {
} = require('librechat-data-provider');
const { retrieveAndProcessFile } = require('~/server/services/Files/process');
const { processRequiredActions } = require('~/server/services/ToolService');
const { createOnProgress, sendMessage, sleep } = require('~/server/utils');
const { processMessages } = require('~/server/services/Threads');
const { logger } = require('~/config');
const { createOnProgress } = require('~/server/utils');

/**
 * Implements the StreamRunManager functionality for managing the streaming
@@ -126,7 +128,7 @@ class StreamRunManager {
      conversationId: this.finalMessage.conversationId,
    };

    sendMessage(this.res, contentData);
    sendEvent(this.res, contentData);
  }

  /* <------------------ Misc. Helpers ------------------> */
@@ -302,7 +304,7 @@ class StreamRunManager {

    for (const d of delta[key]) {
      if (typeof d === 'object' && !Object.prototype.hasOwnProperty.call(d, 'index')) {
        logger.warn('Expected an object with an \'index\' for array updates but got:', d);
        logger.warn("Expected an object with an 'index' for array updates but got:", d);
        continue;
      }

@@ -44,6 +44,9 @@ async function initializeMCP(app) {
    await mcpManager.mapAvailableTools(toolsCopy, flowManager);
    await setCachedTools(toolsCopy, { isGlobal: true });

    const cache = getLogStores(CacheKeys.CONFIG_STORE);
    await cache.delete(CacheKeys.TOOLS);
    logger.debug('Cleared tools array cache after MCP initialization');
    logger.info('MCP servers initialized successfully');
  } catch (error) {
    logger.error('Failed to initialize MCP servers:', error);

407  api/server/utils/__tests__/staticCache.spec.js  Normal file
@@ -0,0 +1,407 @@
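The new spec below exercises the staticCache middleware end to end: cache headers, env overrides, and gzip behavior. For orientation, the middleware under test presumably wraps express-static-gzip or express.static depending on skipGzipScan — a rough sketch under that assumption, not the actual implementation:

const express = require('express');
const expressStaticGzip = require('express-static-gzip');

function staticCacheSketch(dir, { noCache = false, skipGzipScan = false } = {}) {
  const setHeaders = (res, filePath) => {
    // index.html, manifest.json, and sw.js must always revalidate.
    if (noCache || /(?:index\.html|manifest\.json|sw\.js)$/.test(filePath)) {
      res.setHeader('Cache-Control', 'no-store, no-cache, must-revalidate');
    } else if (process.env.NODE_ENV === 'production') {
      res.setHeader('Cache-Control', 'public, max-age=172800, s-maxage=86400');
    }
  };
  return skipGzipScan
    ? express.static(dir, { setHeaders })
    : expressStaticGzip(dir, { enableBrotli: false, serveStatic: { setHeaders } });
}

The skipGzipScan escape hatch matters for directories like image output, where scanning for .gz siblings on every request would be wasted work.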
const fs = require('fs');
const path = require('path');
const express = require('express');
const request = require('supertest');
const zlib = require('zlib');
const staticCache = require('../staticCache');

describe('staticCache', () => {
  let app;
  let testDir;
  let testFile;
  let indexFile;
  let manifestFile;
  let swFile;

  beforeAll(() => {
    // Create a test directory and files
    testDir = path.join(__dirname, 'test-static');
    if (!fs.existsSync(testDir)) {
      fs.mkdirSync(testDir, { recursive: true });
    }

    // Create test files
    testFile = path.join(testDir, 'test.js');
    indexFile = path.join(testDir, 'index.html');
    manifestFile = path.join(testDir, 'manifest.json');
    swFile = path.join(testDir, 'sw.js');

    const jsContent = 'console.log("test");';
    const htmlContent = '<html><body>Test</body></html>';
    const jsonContent = '{"name": "test"}';
    const swContent = 'self.addEventListener("install", () => {});';

    fs.writeFileSync(testFile, jsContent);
    fs.writeFileSync(indexFile, htmlContent);
    fs.writeFileSync(manifestFile, jsonContent);
    fs.writeFileSync(swFile, swContent);

    // Create gzipped versions of some files
    fs.writeFileSync(testFile + '.gz', zlib.gzipSync(jsContent));
    fs.writeFileSync(path.join(testDir, 'test.css'), 'body { color: red; }');
    fs.writeFileSync(path.join(testDir, 'test.css.gz'), zlib.gzipSync('body { color: red; }'));

    // Create a file that only exists in gzipped form
    fs.writeFileSync(
      path.join(testDir, 'only-gzipped.js.gz'),
      zlib.gzipSync('console.log("only gzipped");'),
    );

    // Create a subdirectory for dist/images testing
    const distImagesDir = path.join(testDir, 'dist', 'images');
    fs.mkdirSync(distImagesDir, { recursive: true });
    fs.writeFileSync(path.join(distImagesDir, 'logo.png'), 'fake-png-data');
  });

  afterAll(() => {
    // Clean up test files
    if (fs.existsSync(testDir)) {
      fs.rmSync(testDir, { recursive: true, force: true });
    }
  });

  beforeEach(() => {
    app = express();

    // Clear environment variables
    delete process.env.NODE_ENV;
    delete process.env.STATIC_CACHE_S_MAX_AGE;
    delete process.env.STATIC_CACHE_MAX_AGE;
  });
  describe('cache headers in production', () => {
    beforeEach(() => {
      process.env.NODE_ENV = 'production';
    });

    it('should set standard cache headers for regular files', async () => {
      app.use(staticCache(testDir));

      const response = await request(app).get('/test.js').expect(200);

      expect(response.headers['cache-control']).toBe('public, max-age=172800, s-maxage=86400');
    });

    it('should set no-cache headers for index.html', async () => {
      app.use(staticCache(testDir));

      const response = await request(app).get('/index.html').expect(200);

      expect(response.headers['cache-control']).toBe('no-store, no-cache, must-revalidate');
    });

    it('should set no-cache headers for manifest.json', async () => {
      app.use(staticCache(testDir));

      const response = await request(app).get('/manifest.json').expect(200);

      expect(response.headers['cache-control']).toBe('no-store, no-cache, must-revalidate');
    });

    it('should set no-cache headers for sw.js', async () => {
      app.use(staticCache(testDir));

      const response = await request(app).get('/sw.js').expect(200);

      expect(response.headers['cache-control']).toBe('no-store, no-cache, must-revalidate');
    });

    it('should not set cache headers for /dist/images/ files', async () => {
      app.use(staticCache(testDir));

      const response = await request(app).get('/dist/images/logo.png').expect(200);

      expect(response.headers['cache-control']).toBe('public, max-age=0');
    });

    it('should set no-cache headers when noCache option is true', async () => {
      app.use(staticCache(testDir, { noCache: true }));

      const response = await request(app).get('/test.js').expect(200);

      expect(response.headers['cache-control']).toBe('no-store, no-cache, must-revalidate');
    });
  });

  describe('cache headers in non-production', () => {
    beforeEach(() => {
      process.env.NODE_ENV = 'development';
    });

    it('should not set cache headers in development', async () => {
      app.use(staticCache(testDir));

      const response = await request(app).get('/test.js').expect(200);

      // Our middleware should not set cache-control in non-production
      // Express static might set its own default headers
      const cacheControl = response.headers['cache-control'];
      expect(cacheControl).toBe('public, max-age=0');
    });
  });

  describe('environment variable configuration', () => {
    beforeEach(() => {
      process.env.NODE_ENV = 'production';
    });

    it('should use custom s-maxage from environment', async () => {
      process.env.STATIC_CACHE_S_MAX_AGE = '3600';

      // Need to re-require to pick up new env vars
      jest.resetModules();
      const freshStaticCache = require('../staticCache');

      app.use(freshStaticCache(testDir));

      const response = await request(app).get('/test.js').expect(200);

      expect(response.headers['cache-control']).toBe('public, max-age=172800, s-maxage=3600');
    });

    it('should use custom max-age from environment', async () => {
      process.env.STATIC_CACHE_MAX_AGE = '7200';

      // Need to re-require to pick up new env vars
      jest.resetModules();
      const freshStaticCache = require('../staticCache');

      app.use(freshStaticCache(testDir));

      const response = await request(app).get('/test.js').expect(200);

      expect(response.headers['cache-control']).toBe('public, max-age=7200, s-maxage=86400');
    });

    it('should use both custom values from environment', async () => {
      process.env.STATIC_CACHE_S_MAX_AGE = '1800';
      process.env.STATIC_CACHE_MAX_AGE = '3600';

      // Need to re-require to pick up new env vars
      jest.resetModules();
      const freshStaticCache = require('../staticCache');

      app.use(freshStaticCache(testDir));

      const response = await request(app).get('/test.js').expect(200);

      expect(response.headers['cache-control']).toBe('public, max-age=3600, s-maxage=1800');
    });
  });

  describe('express-static-gzip behavior', () => {
    beforeEach(() => {
      process.env.NODE_ENV = 'production';
    });

    it('should serve gzipped files when client accepts gzip encoding', async () => {
      app.use(staticCache(testDir, { skipGzipScan: false }));

      const response = await request(app)
        .get('/test.js')
        .set('Accept-Encoding', 'gzip, deflate')
        .expect(200);

      expect(response.headers['content-encoding']).toBe('gzip');
      expect(response.headers['content-type']).toMatch(/javascript/);
      expect(response.headers['cache-control']).toBe('public, max-age=172800, s-maxage=86400');
      // Content should be decompressed by supertest
      expect(response.text).toBe('console.log("test");');
    });

    it('should fall back to uncompressed files when client does not accept gzip', async () => {
      app.use(staticCache(testDir, { skipGzipScan: false }));

      const response = await request(app)
        .get('/test.js')
        .set('Accept-Encoding', 'identity')
        .expect(200);

      expect(response.headers['content-encoding']).toBeUndefined();
      expect(response.headers['content-type']).toMatch(/javascript/);
      expect(response.text).toBe('console.log("test");');
    });

    it('should serve gzipped CSS files with correct content-type', async () => {
      app.use(staticCache(testDir, { skipGzipScan: false }));

      const response = await request(app)
        .get('/test.css')
        .set('Accept-Encoding', 'gzip')
        .expect(200);

      expect(response.headers['content-encoding']).toBe('gzip');
      expect(response.headers['content-type']).toMatch(/css/);
      expect(response.text).toBe('body { color: red; }');
    });

    it('should serve uncompressed files when no gzipped version exists', async () => {
      app.use(staticCache(testDir, { skipGzipScan: false }));

      const response = await request(app)
        .get('/manifest.json')
        .set('Accept-Encoding', 'gzip')
        .expect(200);

      expect(response.headers['content-encoding']).toBeUndefined();
      expect(response.headers['content-type']).toMatch(/json/);
      expect(response.text).toBe('{"name": "test"}');
    });

    it('should handle files that only exist in gzipped form', async () => {
      app.use(staticCache(testDir, { skipGzipScan: false }));

      const response = await request(app)
        .get('/only-gzipped.js')
        .set('Accept-Encoding', 'gzip')
        .expect(200);

      expect(response.headers['content-encoding']).toBe('gzip');
      expect(response.headers['content-type']).toMatch(/javascript/);
      expect(response.text).toBe('console.log("only gzipped");');
    });

    it('should return 404 for gzip-only files when client does not accept gzip', async () => {
      app.use(staticCache(testDir, { skipGzipScan: false }));

      const response = await request(app)
        .get('/only-gzipped.js')
        .set('Accept-Encoding', 'identity');
      expect(response.status).toBe(404);
    });

    it('should handle cache headers correctly for gzipped content', async () => {
      app.use(staticCache(testDir, { skipGzipScan: false }));

      const response = await request(app)
        .get('/test.js')
        .set('Accept-Encoding', 'gzip')
        .expect(200);

      expect(response.headers['content-encoding']).toBe('gzip');
      expect(response.headers['cache-control']).toBe('public, max-age=172800, s-maxage=86400');
      expect(response.headers['content-type']).toMatch(/javascript/);
    });

    it('should preserve original MIME types for gzipped files', async () => {
      app.use(staticCache(testDir, { skipGzipScan: false }));

      const jsResponse = await request(app)
        .get('/test.js')
        .set('Accept-Encoding', 'gzip')
        .expect(200);

      const cssResponse = await request(app)
        .get('/test.css')
        .set('Accept-Encoding', 'gzip')
        .expect(200);

      expect(jsResponse.headers['content-type']).toMatch(/javascript/);
      expect(cssResponse.headers['content-type']).toMatch(/css/);
      expect(jsResponse.headers['content-encoding']).toBe('gzip');
      expect(cssResponse.headers['content-encoding']).toBe('gzip');
    });
  });

  describe('skipGzipScan option comparison', () => {
    beforeEach(() => {
      process.env.NODE_ENV = 'production';
    });

    it('should use express.static (no gzip) when skipGzipScan is true', async () => {
      app.use(staticCache(testDir, { skipGzipScan: true }));

      const response = await request(app)
        .get('/test.js')
        .set('Accept-Encoding', 'gzip')
        .expect(200);

      // Should NOT serve gzipped version even though client accepts it
      expect(response.headers['content-encoding']).toBeUndefined();
      expect(response.headers['cache-control']).toBe('public, max-age=172800, s-maxage=86400');
      expect(response.text).toBe('console.log("test");');
    });

    it('should use expressStaticGzip when skipGzipScan is false', async () => {
||||
app.use(staticCache(testDir));
|
||||
|
||||
const response = await request(app)
|
||||
.get('/test.js')
|
||||
.set('Accept-Encoding', 'gzip')
|
||||
.expect(200);
|
||||
|
||||
// Should serve gzipped version when client accepts it
|
||||
expect(response.headers['content-encoding']).toBe('gzip');
|
||||
expect(response.headers['cache-control']).toBe('public, max-age=172800, s-maxage=86400');
|
||||
expect(response.text).toBe('console.log("test");');
|
||||
});
|
||||
});
|
||||
|
||||
describe('file serving', () => {
|
||||
beforeEach(() => {
|
||||
process.env.NODE_ENV = 'production';
|
||||
});
|
||||
|
||||
it('should serve files correctly', async () => {
|
||||
app.use(staticCache(testDir));
|
||||
|
||||
const response = await request(app).get('/test.js').expect(200);
|
||||
|
||||
expect(response.text).toBe('console.log("test");');
|
||||
expect(response.headers['content-type']).toMatch(/javascript|text/);
|
||||
});
|
||||
|
||||
it('should return 404 for non-existent files', async () => {
|
||||
app.use(staticCache(testDir));
|
||||
|
||||
const response = await request(app).get('/nonexistent.js');
|
||||
expect(response.status).toBe(404);
|
||||
});
|
||||
|
||||
it('should serve HTML files', async () => {
|
||||
app.use(staticCache(testDir));
|
||||
|
||||
const response = await request(app).get('/index.html').expect(200);
|
||||
|
||||
expect(response.text).toBe('<html><body>Test</body></html>');
|
||||
expect(response.headers['content-type']).toMatch(/html/);
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
beforeEach(() => {
|
||||
process.env.NODE_ENV = 'production';
|
||||
});
|
||||
|
||||
it('should handle webmanifest files', async () => {
|
||||
// Create a webmanifest file
|
||||
const webmanifestFile = path.join(testDir, 'site.webmanifest');
|
||||
fs.writeFileSync(webmanifestFile, '{"name": "test app"}');
|
||||
|
||||
app.use(staticCache(testDir));
|
||||
|
||||
const response = await request(app).get('/site.webmanifest').expect(200);
|
||||
|
||||
expect(response.headers['cache-control']).toBe('no-store, no-cache, must-revalidate');
|
||||
|
||||
// Clean up
|
||||
fs.unlinkSync(webmanifestFile);
|
||||
});
|
||||
|
||||
it('should handle files in subdirectories', async () => {
|
||||
const subDir = path.join(testDir, 'subdir');
|
||||
fs.mkdirSync(subDir, { recursive: true });
|
||||
const subFile = path.join(subDir, 'nested.js');
|
||||
fs.writeFileSync(subFile, 'console.log("nested");');
|
||||
|
||||
app.use(staticCache(testDir));
|
||||
|
||||
const response = await request(app).get('/subdir/nested.js').expect(200);
|
||||
|
||||
expect(response.headers['cache-control']).toBe('public, max-age=172800, s-maxage=86400');
|
||||
expect(response.text).toBe('console.log("nested");');
|
||||
|
||||
// Clean up
|
||||
fs.rmSync(subDir, { recursive: true, force: true });
|
||||
});
|
||||
});
|
||||
});
|
||||
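The middleware these tests exercise is not shown in this diff, so the following is a minimal sketch of the behavior the assertions above pin down, reconstructed only from the tests. Everything beyond the names staticCache, STATIC_CACHE_MAX_AGE, STATIC_CACHE_S_MAX_AGE, and skipGzipScan is an assumption rather than the actual source.

// Sketch only: inferred from the test expectations, not LibreChat's actual staticCache.js.
const express = require('express');
const expressStaticGzip = require('express-static-gzip');

// Defaults asserted above: max-age=172800 (two days), s-maxage=86400 (one day).
// Read at module load, which is why the env-var tests call jest.resetModules()
// and re-require the module after changing process.env.
const maxAge = process.env.STATIC_CACHE_MAX_AGE || 172800;
const sMaxAge = process.env.STATIC_CACHE_S_MAX_AGE || 86400;

function setHeaders(res, filePath) {
  if (process.env.NODE_ENV !== 'production') {
    // Matches the non-production assertion: 'public, max-age=0'
    res.setHeader('Cache-Control', 'public, max-age=0');
    return;
  }
  if (filePath.endsWith('.webmanifest')) {
    // Manifests must always be revalidated
    res.setHeader('Cache-Control', 'no-store, no-cache, must-revalidate');
    return;
  }
  res.setHeader('Cache-Control', `public, max-age=${maxAge}, s-maxage=${sMaxAge}`);
}

function staticCache(staticPath, options = {}) {
  if (options.skipGzipScan) {
    // Plain express.static: no *.gz lookup, so content-encoding stays unset
    return express.static(staticPath, { setHeaders });
  }
  // express-static-gzip serves file.js.gz when the request allows gzip,
  // and falls back to the plain file (or 404s a gzip-only asset) otherwise
  return expressStaticGzip(staticPath, { serveStatic: { setHeaders } });
}

module.exports = staticCache;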
@@ -7,9 +7,9 @@ const {
  defaultAssistantsVersion,
  defaultAgentCapabilities,
} = require('librechat-data-provider');
+const { sendEvent } = require('@librechat/api');
const { Providers } = require('@librechat/agents');
const partialRight = require('lodash/partialRight');
-const { sendMessage } = require('./streamResponse');

/** Helper function to escape special characters in regex
 * @param {string} string - The string to escape.
@@ -37,7 +37,7 @@ const createOnProgress = (
      basePayload.text = basePayload.text + chunk;

      const payload = Object.assign({}, basePayload, rest);
-      sendMessage(res, payload);
+      sendEvent(res, payload);
      if (_onProgress) {
        _onProgress(payload);
      }
@@ -50,7 +50,7 @@ const createOnProgress = (
  const sendIntermediateMessage = (res, payload, extraTokens = '') => {
    basePayload.text = basePayload.text + extraTokens;
    const message = Object.assign({}, basePayload, payload);
-    sendMessage(res, message);
+    sendEvent(res, message);
    if (i === 0) {
      basePayload.initial = false;
    }
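The hunks above swap the local sendMessage helper for sendEvent from @librechat/api. Only the call shape sendEvent(res, payload) is visible in the diff; below is a plausible sketch of such a server-sent-events writer, not the package's actual implementation.

// Sketch of an SSE writer with the same shape as the sendEvent calls above;
// the real implementation in @librechat/api may differ.
function sendEvent(res, data) {
  if (data == null || typeof data !== 'object') {
    return;
  }
  // Server-sent events are newline-delimited `event:`/`data:` frames
  res.write(`event: message\ndata: ${JSON.stringify(data)}\n\n`);
}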
280 api/server/utils/import/importers-timestamp.spec.js Normal file
@@ -0,0 +1,280 @@
const { Constants } = require('librechat-data-provider');
const { ImportBatchBuilder } = require('./importBatchBuilder');
const { getImporter } = require('./importers');

// Mock the database methods
jest.mock('~/models/Conversation', () => ({
  bulkSaveConvos: jest.fn(),
}));
jest.mock('~/models/Message', () => ({
  bulkSaveMessages: jest.fn(),
}));
jest.mock('~/cache/getLogStores');
const getLogStores = require('~/cache/getLogStores');
const mockedCacheGet = jest.fn();
getLogStores.mockImplementation(() => ({
  get: mockedCacheGet,
}));

describe('Import Timestamp Ordering', () => {
  beforeEach(() => {
    jest.clearAllMocks();
    mockedCacheGet.mockResolvedValue(null);
  });

  describe('LibreChat Import - Timestamp Issues', () => {
    test('should maintain proper timestamp order between parent and child messages', async () => {
      // Create a LibreChat export with out-of-order timestamps
      const jsonData = {
        conversationId: 'test-convo-123',
        title: 'Test Conversation',
        messages: [
          {
            messageId: 'parent-1',
            parentMessageId: Constants.NO_PARENT,
            text: 'Parent Message',
            sender: 'user',
            isCreatedByUser: true,
            createdAt: '2023-01-01T00:02:00Z', // Parent created AFTER child
          },
          {
            messageId: 'child-1',
            parentMessageId: 'parent-1',
            text: 'Child Message',
            sender: 'assistant',
            isCreatedByUser: false,
            createdAt: '2023-01-01T00:01:00Z', // Child created BEFORE parent
          },
          {
            messageId: 'grandchild-1',
            parentMessageId: 'child-1',
            text: 'Grandchild Message',
            sender: 'user',
            isCreatedByUser: true,
            createdAt: '2023-01-01T00:00:30Z', // Even earlier
          },
        ],
      };

      const requestUserId = 'user-123';
      const importBatchBuilder = new ImportBatchBuilder(requestUserId);
      jest.spyOn(importBatchBuilder, 'saveMessage');

      const importer = getImporter(jsonData);
      await importer(jsonData, requestUserId, () => importBatchBuilder);

      // Check the actual messages stored in the builder
      const savedMessages = importBatchBuilder.messages;

      const parent = savedMessages.find((msg) => msg.text === 'Parent Message');
      const child = savedMessages.find((msg) => msg.text === 'Child Message');
      const grandchild = savedMessages.find((msg) => msg.text === 'Grandchild Message');

      // Verify all messages were found
      expect(parent).toBeDefined();
      expect(child).toBeDefined();
      expect(grandchild).toBeDefined();

      // FIXED behavior: timestamps ARE corrected
      expect(new Date(child.createdAt).getTime()).toBeGreaterThan(
        new Date(parent.createdAt).getTime(),
      );
      expect(new Date(grandchild.createdAt).getTime()).toBeGreaterThan(
        new Date(child.createdAt).getTime(),
      );
    });

    test('should handle complex multi-branch scenario with out-of-order timestamps', async () => {
      const jsonData = {
        conversationId: 'complex-test-123',
        title: 'Complex Test',
        messages: [
          // Branch 1: Root -> A -> B with reversed timestamps
          {
            messageId: 'root-1',
            parentMessageId: Constants.NO_PARENT,
            text: 'Root 1',
            sender: 'user',
            isCreatedByUser: true,
            createdAt: '2023-01-01T00:03:00Z',
          },
          {
            messageId: 'a-1',
            parentMessageId: 'root-1',
            text: 'A1',
            sender: 'assistant',
            isCreatedByUser: false,
            createdAt: '2023-01-01T00:02:00Z', // Before parent
          },
          {
            messageId: 'b-1',
            parentMessageId: 'a-1',
            text: 'B1',
            sender: 'user',
            isCreatedByUser: true,
            createdAt: '2023-01-01T00:01:00Z', // Before grandparent
          },
          // Branch 2: Root -> C -> D with mixed timestamps
          {
            messageId: 'root-2',
            parentMessageId: Constants.NO_PARENT,
            text: 'Root 2',
            sender: 'user',
            isCreatedByUser: true,
            createdAt: '2023-01-01T00:00:30Z', // Earlier than branch 1
          },
          {
            messageId: 'c-2',
            parentMessageId: 'root-2',
            text: 'C2',
            sender: 'assistant',
            isCreatedByUser: false,
            createdAt: '2023-01-01T00:04:00Z', // Much later
          },
          {
            messageId: 'd-2',
            parentMessageId: 'c-2',
            text: 'D2',
            sender: 'user',
            isCreatedByUser: true,
            createdAt: '2023-01-01T00:02:30Z', // Between root and parent
          },
        ],
      };

      const requestUserId = 'user-123';
      const importBatchBuilder = new ImportBatchBuilder(requestUserId);
      jest.spyOn(importBatchBuilder, 'saveMessage');

      const importer = getImporter(jsonData);
      await importer(jsonData, requestUserId, () => importBatchBuilder);

      const savedMessages = importBatchBuilder.messages;

      // Verify that timestamps were corrected into parent-before-child order
      const root1 = savedMessages.find((msg) => msg.text === 'Root 1');
      const a1 = savedMessages.find((msg) => msg.text === 'A1');
      const b1 = savedMessages.find((msg) => msg.text === 'B1');
      const root2 = savedMessages.find((msg) => msg.text === 'Root 2');
      const c2 = savedMessages.find((msg) => msg.text === 'C2');
      const d2 = savedMessages.find((msg) => msg.text === 'D2');

      // Branch 1: timestamps should now be in correct order
      expect(new Date(a1.createdAt).getTime()).toBeGreaterThan(new Date(root1.createdAt).getTime());
      expect(new Date(b1.createdAt).getTime()).toBeGreaterThan(new Date(a1.createdAt).getTime());

      // Branch 2: all timestamps should be properly ordered
      expect(new Date(c2.createdAt).getTime()).toBeGreaterThan(new Date(root2.createdAt).getTime());
      expect(new Date(d2.createdAt).getTime()).toBeGreaterThan(new Date(c2.createdAt).getTime());
    });

    test('recursive format should NOW have timestamp protection', async () => {
      // Create a recursive LibreChat export with out-of-order timestamps
      const jsonData = {
        conversationId: 'recursive-test-123',
        title: 'Recursive Test',
        recursive: true,
        messages: [
          {
            messageId: 'parent-1',
            parentMessageId: Constants.NO_PARENT,
            text: 'Parent Message',
            sender: 'User',
            isCreatedByUser: true,
            createdAt: '2023-01-01T00:02:00Z', // Parent created AFTER child
            children: [
              {
                messageId: 'child-1',
                parentMessageId: 'parent-1',
                text: 'Child Message',
                sender: 'Assistant',
                isCreatedByUser: false,
                createdAt: '2023-01-01T00:01:00Z', // Child created BEFORE parent
                children: [
                  {
                    messageId: 'grandchild-1',
                    parentMessageId: 'child-1',
                    text: 'Grandchild Message',
                    sender: 'User',
                    isCreatedByUser: true,
                    createdAt: '2023-01-01T00:00:30Z', // Even earlier
                    children: [],
                  },
                ],
              },
            ],
          },
        ],
      };

      const requestUserId = 'user-123';
      const importBatchBuilder = new ImportBatchBuilder(requestUserId);

      const importer = getImporter(jsonData);
      await importer(jsonData, requestUserId, () => importBatchBuilder);

      const savedMessages = importBatchBuilder.messages;

      // Messages should be saved
      expect(savedMessages).toHaveLength(3);

      const parent = savedMessages.find((msg) => msg.text === 'Parent Message');
      const child = savedMessages.find((msg) => msg.text === 'Child Message');
      const grandchild = savedMessages.find((msg) => msg.text === 'Grandchild Message');

      expect(parent).toBeDefined();
      expect(child).toBeDefined();
      expect(grandchild).toBeDefined();

      // Recursive imports NOW preserve and correct timestamps
      expect(parent.createdAt).toBeDefined();
      expect(child.createdAt).toBeDefined();
      expect(grandchild.createdAt).toBeDefined();

      // Timestamps should be corrected to maintain proper order
      expect(new Date(child.createdAt).getTime()).toBeGreaterThan(
        new Date(parent.createdAt).getTime(),
      );
      expect(new Date(grandchild.createdAt).getTime()).toBeGreaterThan(
        new Date(child.createdAt).getTime(),
      );
    });
  });

  describe('Comparison with Fork Functionality', () => {
    test('fork functionality correctly handles timestamp issues (for comparison)', async () => {
      const { cloneMessagesWithTimestamps } = require('./fork');

      const messagesToClone = [
        {
          messageId: 'parent',
          parentMessageId: Constants.NO_PARENT,
          text: 'Parent Message',
          createdAt: '2023-01-01T00:02:00Z', // Parent created AFTER child
        },
        {
          messageId: 'child',
          parentMessageId: 'parent',
          text: 'Child Message',
          createdAt: '2023-01-01T00:01:00Z', // Child created BEFORE parent
        },
      ];

      const importBatchBuilder = new ImportBatchBuilder('user-123');
      jest.spyOn(importBatchBuilder, 'saveMessage');

      cloneMessagesWithTimestamps(messagesToClone, importBatchBuilder);

      const savedMessages = importBatchBuilder.messages;
      const parent = savedMessages.find((msg) => msg.text === 'Parent Message');
      const child = savedMessages.find((msg) => msg.text === 'Child Message');

      // Fork functionality DOES correct the timestamps
      expect(new Date(child.createdAt).getTime()).toBeGreaterThan(
        new Date(parent.createdAt).getTime(),
      );
    });
  });
});
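Both suites above depend on cloneMessagesWithTimestamps from ./fork, which never appears in this diff. The following is a minimal sketch of the correction the assertions demand, inferred from the tests alone; the one-second bump and the omission of ID remapping are assumptions.

// Sketch only: the shape of timestamp correction the assertions above demand.
// The actual cloneMessagesWithTimestamps in ./fork also remaps message IDs and
// does other bookkeeping omitted here; the one-second bump is an assumption
// (any positive increment would satisfy the tests). Assumes a cycle-free tree.
function cloneMessagesWithTimestamps(messagesToClone, importBatchBuilder) {
  const byId = new Map(messagesToClone.map((msg) => [msg.messageId, msg]));
  const fixedAt = new Map();

  const resolveTimestamp = (message) => {
    if (fixedAt.has(message.messageId)) {
      return fixedAt.get(message.messageId);
    }
    let ts = new Date(message.createdAt).getTime();
    const parent = byId.get(message.parentMessageId);
    if (parent) {
      // Fix the parent first, then push the child past it if it is not newer
      const parentTs = resolveTimestamp(parent);
      if (ts <= parentTs) {
        ts = parentTs + 1000;
      }
    }
    fixedAt.set(message.messageId, ts);
    return ts;
  };

  for (const message of messagesToClone) {
    importBatchBuilder.saveMessage({
      ...message,
      createdAt: new Date(resolveTimestamp(message)),
    });
  }
}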
@@ -1,6 +1,7 @@
const { v4: uuidv4 } = require('uuid');
const { EModelEndpoint, Constants, openAISettings, CacheKeys } = require('librechat-data-provider');
const { createImportBatchBuilder } = require('./importBatchBuilder');
+const { cloneMessagesWithTimestamps } = require('./fork');
const getLogStores = require('~/cache/getLogStores');
const logger = require('~/config/winston');

@@ -107,67 +108,47 @@ async function importLibreChatConvo(

  if (jsonData.recursive) {
    /**
-     * Recursively traverse the messages tree and save each message to the database.
+     * Flatten the recursive message tree into a flat array
     * @param {TMessage[]} messages
     * @param {string} parentMessageId
+     * @param {TMessage[]} flatMessages
     */
-    const traverseMessages = async (messages, parentMessageId = null) => {
+    const flattenMessages = (
+      messages,
+      parentMessageId = Constants.NO_PARENT,
+      flatMessages = [],
+    ) => {
      for (const message of messages) {
        if (!message.text && !message.content) {
          continue;
        }

-        let savedMessage;
-        if (message.sender?.toLowerCase() === 'user' || message.isCreatedByUser) {
-          savedMessage = await importBatchBuilder.saveMessage({
-            text: message.text,
-            content: message.content,
-            sender: 'user',
-            isCreatedByUser: true,
-            parentMessageId: parentMessageId,
-          });
-        } else {
-          savedMessage = await importBatchBuilder.saveMessage({
-            text: message.text,
-            content: message.content,
-            sender: message.sender,
-            isCreatedByUser: false,
-            model: options.model,
-            parentMessageId: parentMessageId,
-          });
-        }
+        const flatMessage = {
+          ...message,
+          parentMessageId: parentMessageId,
+          children: undefined, // Remove children from flat structure
+        };
+        flatMessages.push(flatMessage);

        if (!firstMessageDate && message.createdAt) {
          firstMessageDate = new Date(message.createdAt);
        }

        if (message.children && message.children.length > 0) {
-          await traverseMessages(message.children, savedMessage.messageId);
+          flattenMessages(message.children, message.messageId, flatMessages);
        }
      }
+      return flatMessages;
    };

-    await traverseMessages(messagesToImport);
+    const flatMessages = flattenMessages(messagesToImport);
+    cloneMessagesWithTimestamps(flatMessages, importBatchBuilder);
  } else if (messagesToImport) {
-    const idMapping = new Map();
-
+    cloneMessagesWithTimestamps(messagesToImport, importBatchBuilder);
    for (const message of messagesToImport) {
      if (!firstMessageDate && message.createdAt) {
        firstMessageDate = new Date(message.createdAt);
      }
-      const newMessageId = uuidv4();
-      idMapping.set(message.messageId, newMessageId);
-
-      const clonedMessage = {
-        ...message,
-        messageId: newMessageId,
-        parentMessageId:
-          message.parentMessageId && message.parentMessageId !== Constants.NO_PARENT
-            ? idMapping.get(message.parentMessageId) || Constants.NO_PARENT
-            : Constants.NO_PARENT,
-      };
-
-      importBatchBuilder.saveMessage(clonedMessage);
    }
  } else {
    throw new Error('Invalid LibreChat file format');

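To make the new control flow concrete, here is a small worked example with hypothetical data; the IDs and texts below are illustrative, not taken from the diff.

// Hypothetical input in the recursive export format:
const tree = [
  {
    messageId: 'parent-1',
    parentMessageId: Constants.NO_PARENT,
    text: 'Parent',
    createdAt: '2023-01-01T00:02:00Z',
    children: [
      { messageId: 'child-1', text: 'Child', createdAt: '2023-01-01T00:01:00Z', children: [] },
    ],
  },
];

// flattenMessages(tree) walks the tree and returns a flat array with
// children removed and parentMessageId filled in from the traversal:
//   { messageId: 'parent-1', parentMessageId: Constants.NO_PARENT, ... }
//   { messageId: 'child-1', parentMessageId: 'parent-1', ... }
// cloneMessagesWithTimestamps then saves both, bumping the child's createdAt
// past the parent's so the out-of-order export no longer corrupts ordering.
const flatMessages = flattenMessages(tree);
cloneMessagesWithTimestamps(flatMessages, importBatchBuilder);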
@@ -175,36 +175,60 @@ describe('importLibreChatConvo', () => {
jest.spyOn(importBatchBuilder, 'saveMessage');
jest.spyOn(importBatchBuilder, 'saveBatch');

// When
const importer = getImporter(jsonData);
await importer(jsonData, requestUserId, () => importBatchBuilder);

// Create a map to track original message IDs to new UUIDs
const idToUUIDMap = new Map();
importBatchBuilder.saveMessage.mock.calls.forEach((call) => {
const message = call[0];
idToUUIDMap.set(message.originalMessageId, message.messageId);
// Get the imported messages
const messages = importBatchBuilder.messages;
expect(messages.length).toBeGreaterThan(0);

// Build maps for verification
const textToMessageMap = new Map();
const messageIdToMessage = new Map();
messages.forEach((msg) => {
if (msg.text) {
// For recursive imports, text might be very long, so just use the first 100 chars as key
const textKey = msg.text.substring(0, 100);
textToMessageMap.set(textKey, msg);
}
messageIdToMessage.set(msg.messageId, msg);
});

const checkChildren = (children, parentId) => {
children.forEach((child) => {
const childUUID = idToUUIDMap.get(child.messageId);
const expectedParentId = idToUUIDMap.get(parentId) ?? null;
const messageCall = importBatchBuilder.saveMessage.mock.calls.find(
(call) => call[0].messageId === childUUID,
);

const actualParentId = messageCall[0].parentMessageId;
expect(actualParentId).toBe(expectedParentId);

if (child.children && child.children.length > 0) {
checkChildren(child.children, child.messageId);
// Count expected messages from the tree
const countMessagesInTree = (nodes) => {
let count = 0;
nodes.forEach((node) => {
if (node.text || node.content) {
count++;
}
if (node.children && node.children.length > 0) {
count += countMessagesInTree(node.children);
}
});
return count;
};

// Start hierarchy validation from root messages
checkChildren(jsonData.messages, null);
const expectedMessageCount = countMessagesInTree(jsonData.messages);
expect(messages.length).toBe(expectedMessageCount);

// Verify all messages have valid parent relationships
messages.forEach((msg) => {
if (msg.parentMessageId !== Constants.NO_PARENT) {
const parent = messageIdToMessage.get(msg.parentMessageId);
expect(parent).toBeDefined();

// Verify timestamp ordering
if (msg.createdAt && parent.createdAt) {
expect(new Date(msg.createdAt).getTime()).toBeGreaterThanOrEqual(
new Date(parent.createdAt).getTime(),
);
}
}
});

// Verify at least one root message exists
const rootMessages = messages.filter((msg) => msg.parentMessageId === Constants.NO_PARENT);
expect(rootMessages.length).toBeGreaterThan(0);

expect(importBatchBuilder.saveBatch).toHaveBeenCalled();
});

@@ -1,11 +1,9 @@
const streamResponse = require('./streamResponse');
const removePorts = require('./removePorts');
const countTokens = require('./countTokens');
const handleText = require('./handleText');
const sendEmail = require('./sendEmail');
const queue = require('./queue');
const files = require('./files');
const math = require('./math');

/**
 * Check if email configuration is set
@@ -28,7 +26,6 @@ function checkEmailConfig() {
}

module.exports = {
  ...streamResponse,
  checkEmailConfig,
  ...handleText,
  countTokens,
@@ -36,5 +33,4 @@ module.exports = {
  sendEmail,
  ...files,
  ...queue,
  math,
};

Some files were not shown because too many files have changed in this diff