Compare commits
46 Commits
feat/agent...refactor/a

| Author | SHA1 | Date |
|---|---|---|
| | 62b4d29967 | |
| | f7ea232760 | |
| | 28cd234a6c | |
| | 550bf56077 | |
| | ab4596206f | |
| | 84c78f5812 | |
| | 1bdfb143eb | |
| | 20a486a190 | |
| | 6873b396ea | |
| | 1c7b3b53da | |
| | c160e7c7d5 | |
| | c0d6404385 | |
| | 71a14517cd | |
| | 647b1bbac6 | |
| | 5eef6ea9e8 | |
| | 240e3bd59e | |
| | 89fb9c7e1c | |
| | 8525c8df36 | |
| | 46f9c90223 | |
| | 1d2be247cf | |
| | d853c10920 | |
| | 5e70d518aa | |
| | 8df0ecd438 | |
| | f25e4253d0 | |
| | 493f60fa54 | |
| | 1168a7d82e | |
| | 8f89fdc802 | |
| | 57513f7ac9 | |
| | c82c47ab6a | |
| | b0256510b5 | |
| | e0980e796a | |
| | 370e2d7ade | |
| | 0e0758edaf | |
| | 50bd6d3a02 | |
| | 677481dde6 | |
| | 2229672929 | |
| | 68c6afd009 | |
| | 0b5816d1be | |
| | e7af3bdaed | |
| | b2b2aee945 | |
| | 2501d11fa0 | |
| | eeab69ff7f | |
| | 5bb731764c | |
| | 5b43bf6c95 | |
| | 3ecb96149a | |
| | b992fed16c | |

.env.example (11 changes)

@@ -40,13 +40,6 @@ NO_INDEX=true
# Defaulted to 1.
TRUST_PROXY=1

# Minimum password length for user authentication
# Default: 8
# Note: When using LDAP authentication, you may want to set this to 1
# to bypass local password validation, as LDAP servers handle their own
# password policies.
# MIN_PASSWORD_LENGTH=8

#===============#
# JSON Logging #
#===============#
@@ -667,10 +660,6 @@ HELP_AND_FAQ_URL=https://librechat.ai
# REDIS_URI=rediss://127.0.0.1:6380
# REDIS_CA=/path/to/ca-cert.pem

# Elasticache may need to use an alternate dnsLookup for TLS connections. see "Special Note: Aws Elasticache Clusters with TLS" on this webpage: https://www.npmjs.com/package/ioredis
# Enable alternative dnsLookup for redis
# REDIS_USE_ALTERNATIVE_DNS_LOOKUP=true

# Redis authentication (if required)
# REDIS_USERNAME=your_redis_username
# REDIS_PASSWORD=your_redis_password
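
For context on the Redis settings referenced above: the `REDIS_USE_ALTERNATIVE_DNS_LOOKUP` flag corresponds to an ioredis `dnsLookup` override that is needed for AWS ElastiCache clusters behind TLS. Below is a minimal sketch of that pattern, assuming ioredis and illustrative connection values; it is not LibreChat's actual client setup.

```js
// Minimal sketch: connecting to an AWS ElastiCache cluster over TLS with ioredis.
// ElastiCache presents certificates for its own hostnames, so the cluster's
// dnsLookup is overridden to pass the address through unchanged
// (see "Special Note: Aws Elasticache Clusters with TLS" in the ioredis README).
const Redis = require('ioredis');

const useAlternativeDnsLookup = process.env.REDIS_USE_ALTERNATIVE_DNS_LOOKUP === 'true';

const cluster = new Redis.Cluster(
  [{ host: '127.0.0.1', port: 6380 }], // placeholder endpoint; use REDIS_URI in practice
  {
    // Only add the passthrough lookup when the flag is enabled.
    ...(useAlternativeDnsLookup
      ? { dnsLookup: (address, callback) => callback(null, address) }
      : {}),
    redisOptions: {
      tls: {},
      username: process.env.REDIS_USERNAME,
      password: process.env.REDIS_PASSWORD,
    },
  },
);

module.exports = cluster;
```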

.github/workflows/i18n-unused-keys.yml (33 changes)

@@ -1,10 +1,5 @@
name: Detect Unused i18next Strings

# This workflow checks for unused i18n keys in translation files.
# It has special handling for:
# - com_ui_special_var_* keys that are dynamically constructed
# - com_agents_category_* keys that are stored in the database and used dynamically

on:
pull_request:
paths:
@@ -12,7 +7,6 @@ on:
- "api/**"
- "packages/data-provider/src/**"
- "packages/client/**"
- "packages/data-schemas/src/**"

jobs:
detect-unused-i18n-keys:
@@ -30,7 +24,7 @@ jobs:

# Define paths
I18N_FILE="client/src/locales/en/translation.json"
SOURCE_DIRS=("client/src" "api" "packages/data-provider/src" "packages/client" "packages/data-schemas/src")
SOURCE_DIRS=("client/src" "api" "packages/data-provider/src" "packages/client")

# Check if translation file exists
if [[ ! -f "$I18N_FILE" ]]; then
@@ -58,31 +52,6 @@ jobs:
fi
done

# Also check if the key is directly used somewhere
if [[ "$FOUND" == false ]]; then
for DIR in "${SOURCE_DIRS[@]}"; do
if grep -r --include=\*.{js,jsx,ts,tsx} -q "$KEY" "$DIR"; then
FOUND=true
break
fi
done
fi
# Special case for agent category keys that are dynamically used from database
elif [[ "$KEY" == com_agents_category_* ]]; then
# Check if agent category localization is being used
for DIR in "${SOURCE_DIRS[@]}"; do
# Check for dynamic category label/description usage
if grep -r --include=\*.{js,jsx,ts,tsx} -E "category\.(label|description).*startsWith.*['\"]com_" "$DIR" > /dev/null 2>&1 || \
# Check for the method that defines these keys
grep -r --include=\*.{js,jsx,ts,tsx} "ensureDefaultCategories" "$DIR" > /dev/null 2>&1 || \
# Check for direct usage in agentCategory.ts
grep -r --include=\*.ts -E "label:.*['\"]$KEY['\"]" "$DIR" > /dev/null 2>&1 || \
grep -r --include=\*.ts -E "description:.*['\"]$KEY['\"]" "$DIR" > /dev/null 2>&1; then
FOUND=true
break
fi
done

# Also check if the key is directly used somewhere
if [[ "$FOUND" == false ]]; then
for DIR in "${SOURCE_DIRS[@]}"; do
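
The workflow above works by grepping the configured source directories for every key in the English translation file and flagging keys that never appear, with special handling for dynamically constructed keys. The following is a rough Node.js sketch of the same idea, for illustration only; the real check is the bash step shown in the diff.

```js
// Rough Node.js sketch of the unused-i18n-key check performed by the workflow above.
const fs = require('fs');
const path = require('path');

const I18N_FILE = 'client/src/locales/en/translation.json';
const SOURCE_DIRS = ['client/src', 'api', 'packages/data-provider/src', 'packages/client'];
const EXTENSIONS = new Set(['.js', '.jsx', '.ts', '.tsx']);

/** Recursively collect source files with the extensions the workflow greps. */
function collectFiles(dir, files = []) {
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    const full = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      collectFiles(full, files);
    } else if (EXTENSIONS.has(path.extname(entry.name))) {
      files.push(full);
    }
  }
  return files;
}

const keys = Object.keys(JSON.parse(fs.readFileSync(I18N_FILE, 'utf8')));
const sources = SOURCE_DIRS.flatMap((dir) => collectFiles(dir)).map((f) =>
  fs.readFileSync(f, 'utf8'),
);

const unused = keys.filter(
  (key) =>
    // Keys that are built or resolved dynamically are skipped here, mirroring
    // the workflow's special cases for these prefixes.
    !key.startsWith('com_ui_special_var_') &&
    !key.startsWith('com_agents_category_') &&
    !sources.some((src) => src.includes(key)),
);

console.log(unused.length ? `Unused keys:\n${unused.join('\n')}` : 'No unused keys found.');
```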

@@ -1,4 +1,4 @@
# v0.8.0-rc3
# v0.8.0-rc2

# Base node image
FROM node:20-alpine AS node

@@ -1,5 +1,5 @@
# Dockerfile.multi
# v0.8.0-rc3
# v0.8.0-rc2

# Base for all builds
FROM node:20-alpine AS base-min

README.md (15 changes)

@@ -65,10 +65,8 @@

- 🔦 **Agents & Tools Integration**:
- **[LibreChat Agents](https://www.librechat.ai/docs/features/agents)**:
- No-Code Custom Assistants: Build specialized, AI-driven helpers
- Agent Marketplace: Discover and deploy community-built agents
- Collaborative Sharing: Share agents with specific users and groups
- Flexible & Extensible: Use MCP Servers, tools, file search, code execution, and more
- No-Code Custom Assistants: Build specialized, AI-driven helpers without coding
- Flexible & Extensible: Use MCP Servers, tools, file search, code execution, and more
- Compatible with Custom Endpoints, OpenAI, Azure, Anthropic, AWS Bedrock, Google, Vertex AI, Responses API, and more
- [Model Context Protocol (MCP) Support](https://modelcontextprotocol.io/clients#librechat) for Tools

@@ -89,18 +87,15 @@
- Create, Save, & Share Custom Presets
- Switch between AI Endpoints and Presets mid-chat
- Edit, Resubmit, and Continue Messages with Conversation branching
- Create and share prompts with specific users and groups
- [Fork Messages & Conversations](https://www.librechat.ai/docs/features/fork) for Advanced Context control

- 💬 **Multimodal & File Interactions**:
- Upload and analyze images with Claude 3, GPT-4.5, GPT-4o, o1, Llama-Vision, and Gemini 📸
- Chat with Files using Custom Endpoints, OpenAI, Azure, Anthropic, AWS Bedrock, & Google 🗃️

- 🌎 **Multilingual UI**:
- English, 中文 (简体), 中文 (繁體), العربية, Deutsch, Español, Français, Italiano
- Polski, Português (PT), Português (BR), Русский, 日本語, Svenska, 한국어, Tiếng Việt
- Türkçe, Nederlands, עברית, Català, Čeština, Dansk, Eesti, فارسی
- Suomi, Magyar, Հայերեն, Bahasa Indonesia, ქართული, Latviešu, ไทย, ئۇيغۇرچە
- 🌎 **Multilingual UI**:
- English, 中文, Deutsch, Español, Français, Italiano, Polski, Português Brasileiro
- Русский, 日本語, Svenska, 한국어, Tiếng Việt, 繁體中文, العربية, Türkçe, Nederlands, עברית

- 🧠 **Reasoning UI**:
- Dynamic Reasoning UI for Chain-of-Thought/Reasoning AI models like DeepSeek-R1

@@ -1,7 +1,5 @@
const crypto = require('crypto');
const fetch = require('node-fetch');
const { logger } = require('@librechat/data-schemas');
const { getBalanceConfig } = require('@librechat/api');
const {
supportsBalanceCheck,
isAgentsEndpoint,
@@ -13,10 +11,12 @@ const {
Constants,
} = require('librechat-data-provider');
const { getMessages, saveMessage, updateMessage, saveConvo, getConvo } = require('~/models');
const { getAppConfig } = require('~/server/services/Config');
const { checkBalance } = require('~/models/balanceMethods');
const { truncateToolCallOutputs } = require('./prompts');
const { getFiles } = require('~/models/File');
const TextStream = require('./TextStream');
const { logger } = require('~/config');

class BaseClient {
constructor(apiKey, options = {}) {
@@ -190,8 +190,7 @@ class BaseClient {
this.user = user;
const saveOptions = this.getSaveOptions();
this.abortController = opts.abortController ?? new AbortController();
const requestConvoId = overrideConvoId ?? opts.conversationId;
const conversationId = requestConvoId ?? crypto.randomUUID();
const conversationId = overrideConvoId ?? opts.conversationId ?? crypto.randomUUID();
const parentMessageId = opts.parentMessageId ?? Constants.NO_PARENT;
const userMessageId =
overrideUserMessageId ?? opts.overrideParentMessageId ?? crypto.randomUUID();
@@ -216,12 +215,11 @@ class BaseClient {
...opts,
user,
head,
saveOptions,
userMessageId,
requestConvoId,
conversationId,
parentMessageId,
userMessageId,
responseMessageId,
saveOptions,
};
}

@@ -240,12 +238,11 @@ class BaseClient {
const {
user,
head,
saveOptions,
userMessageId,
requestConvoId,
conversationId,
parentMessageId,
userMessageId,
responseMessageId,
saveOptions,
} = await this.setMessageOptions(opts);

const userMessage = opts.isEdited
@@ -267,8 +264,7 @@ class BaseClient {
}

if (typeof opts?.onStart === 'function') {
const isNewConvo = !requestConvoId && parentMessageId === Constants.NO_PARENT;
opts.onStart(userMessage, responseMessageId, isNewConvo);
opts.onStart(userMessage, responseMessageId);
}

return {
@@ -574,7 +570,7 @@ class BaseClient {
}

async sendMessage(message, opts = {}) {
const appConfig = this.options.req?.config;
const appConfig = await getAppConfig({ role: this.options.req?.user?.role });
/** @type {Promise<TMessage>} */
let userMessagePromise;
const { user, head, isEdited, conversationId, responseMessageId, saveOptions, userMessage } =
@@ -661,9 +657,9 @@ class BaseClient {
}
}

const balanceConfig = getBalanceConfig(appConfig);
const balance = appConfig?.balance;
if (
balanceConfig?.enabled &&
balance?.enabled &&
supportsBalanceCheck[this.options.endpointType ?? this.options.endpoint]
) {
await checkBalance({
@@ -760,9 +756,9 @@ class BaseClient {
completionTokens = responseMessage.tokenCount;
await this.recordTokenUsage({
usage,
balance,
promptTokens,
completionTokens,
balance: balanceConfig,
model: responseMessage.model,
});
}
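
A recurring theme in the BaseClient hunks above is that the caller-requested conversation ID (`requestConvoId`) is now tracked separately from the possibly generated `conversationId`, so `onStart` can be told whether the conversation is new. The following is a condensed sketch of that logic; the `NO_PARENT` value is a stand-in for `Constants.NO_PARENT`, not the library's actual constant.

```js
// Condensed illustration of the conversationId handling shown in the diff above.
const crypto = require('crypto');

const NO_PARENT = '00000000-0000-0000-0000-000000000000'; // stand-in for Constants.NO_PARENT

function resolveConversation(opts = {}, overrideConvoId) {
  // The ID the request actually asked for, if any...
  const requestConvoId = overrideConvoId ?? opts.conversationId;
  // ...falling back to a freshly generated UUID when none was supplied.
  const conversationId = requestConvoId ?? crypto.randomUUID();
  const parentMessageId = opts.parentMessageId ?? NO_PARENT;
  // A conversation counts as "new" only when no ID was requested and there is no parent message.
  const isNewConvo = !requestConvoId && parentMessageId === NO_PARENT;
  return { conversationId, parentMessageId, isNewConvo };
}

// Example: no requested ID and no parent -> a new conversation with a generated UUID.
console.log(resolveConversation({}));
```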

@@ -34,6 +34,7 @@ const {
const { extractBaseURL, getModelMaxTokens, getModelMaxOutputTokens } = require('~/utils');
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
const { addSpaceIfNeeded, sleep } = require('~/server/utils');
const { getAppConfig } = require('~/server/services/Config');
const { spendTokens } = require('~/models/spendTokens');
const { handleOpenAIErrors } = require('./tools/util');
const { summaryBuffer } = require('./memory');
@@ -688,7 +689,7 @@ class OpenAIClient extends BaseClient {
* In case of failure, it will return the default title, "New Chat".
*/
async titleConvo({ text, conversationId, responseText = '' }) {
const appConfig = this.options.req?.config;
const appConfig = await getAppConfig({ role: this.options.req?.user?.role });
this.conversationId = conversationId;

if (this.options.attachments) {
@@ -1106,7 +1107,7 @@ ${convo}
}

async chatCompletion({ payload, onProgress, abortController = null }) {
const appConfig = this.options.req?.config;
const appConfig = await getAppConfig({ role: this.options.req?.user?.role });
let error = null;
let intermediateReply = [];
const errorCallback = (err) => (error = err);

@@ -1,6 +1,7 @@
const axios = require('axios');
const { isEnabled } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { isEnabled, generateShortLivedToken } = require('@librechat/api');
const { generateShortLivedToken } = require('~/server/services/AuthService');

const footer = `Use the context as your learned knowledge to better answer the user.

@@ -587,8 +587,6 @@ describe('BaseClient', () => {
expect(onStart).toHaveBeenCalledWith(
expect.objectContaining({ text: 'Hello, world!' }),
expect.any(String),
/** `isNewConvo` */
true,
);
});

@@ -49,7 +49,7 @@
"pluginKey": "image_gen_oai",
"toolkit": true,
"description": "Image Generation and Editing using OpenAI's latest state-of-the-art models",
"icon": "assets/image_gen_oai.png",
"icon": "/assets/image_gen_oai.png",
"authConfig": [
{
"authField": "IMAGE_GEN_OAI_API_KEY",
@@ -75,7 +75,7 @@
"name": "Browser",
"pluginKey": "web-browser",
"description": "Scrape and summarize webpage data",
"icon": "assets/web-browser.svg",
"icon": "/assets/web-browser.svg",
"authConfig": [
{
"authField": "OPENAI_API_KEY",
@@ -170,7 +170,7 @@
"name": "OpenWeather",
"pluginKey": "open_weather",
"description": "Get weather forecasts and historical data from the OpenWeather API",
"icon": "assets/openweather.png",
"icon": "/assets/openweather.png",
"authConfig": [
{
"authField": "OPENWEATHER_API_KEY",

@@ -11,14 +11,14 @@ const paths = require('~/config/paths');
const { logger } = require('~/config');

const displayMessage =
"Stable Diffusion displayed an image. All generated images are already plainly visible, so don't repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.";
'Stable Diffusion displayed an image. All generated images are already plainly visible, so don\'t repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.';

class StableDiffusionAPI extends Tool {
constructor(fields) {
super();
/** @type {string} User ID */
this.userId = fields.userId;
/** @type {ServerRequest | undefined} Express Request object, only provided by ToolService */
/** @type {Express.Request | undefined} Express Request object, only provided by ToolService */
this.req = fields.req;
/** @type {boolean} Used to initialize the Tool without necessary variables. */
this.override = fields.override ?? false;
@@ -44,7 +44,7 @@ class StableDiffusionAPI extends Tool {
// "negative_prompt":"semi-realistic, cgi, 3d, render, sketch, cartoon, drawing, anime, out of frame, low quality, ugly, mutation, deformed"
// - Generate images only once per human query unless explicitly requested by the user`;
this.description =
"You can generate images using text with 'stable-diffusion'. This tool is exclusively for visual content.";
'You can generate images using text with \'stable-diffusion\'. This tool is exclusively for visual content.';
this.schema = z.object({
prompt: z
.string()

@@ -2,9 +2,9 @@ const { z } = require('zod');
const axios = require('axios');
const { tool } = require('@langchain/core/tools');
const { logger } = require('@librechat/data-schemas');
const { generateShortLivedToken } = require('@librechat/api');
const { Tools, EToolResources } = require('librechat-data-provider');
const { filterFilesByAgentAccess } = require('~/server/services/Files/permissions');
const { generateShortLivedToken } = require('~/server/services/AuthService');
const { getFiles } = require('~/models/File');

/**
@@ -3,7 +3,7 @@ const { SerpAPI } = require('@langchain/community/tools/serpapi');
|
||||
const { Calculator } = require('@langchain/community/tools/calculator');
|
||||
const { mcpToolPattern, loadWebSearchAuth } = require('@librechat/api');
|
||||
const { EnvVar, createCodeExecutionTool, createSearchTool } = require('@librechat/agents');
|
||||
const { Tools, Constants, EToolResources, replaceSpecialVars } = require('librechat-data-provider');
|
||||
const { Tools, EToolResources, replaceSpecialVars } = require('librechat-data-provider');
|
||||
const {
|
||||
availableTools,
|
||||
manifestToolMap,
|
||||
@@ -24,9 +24,9 @@ const {
|
||||
const { primeFiles: primeCodeFiles } = require('~/server/services/Files/Code/process');
|
||||
const { createFileSearchTool, primeFiles: primeSearchFiles } = require('./fileSearch');
|
||||
const { getUserPluginAuthValue } = require('~/server/services/PluginService');
|
||||
const { createMCPTool, createMCPTools } = require('~/server/services/MCP');
|
||||
const { loadAuthValues } = require('~/server/services/Tools/credentials');
|
||||
const { getCachedTools } = require('~/server/services/Config');
|
||||
const { createMCPTool } = require('~/server/services/MCP');
|
||||
|
||||
/**
|
||||
* Validates the availability and authentication of tools for a user based on environment variables or user-specific plugin authentication values.
|
||||
@@ -123,8 +123,6 @@ const getAuthFields = (toolKey) => {
|
||||
*
|
||||
* @param {object} params
|
||||
* @param {string} params.user
|
||||
* @param {Record<string, Record<string, string>>} [object.userMCPAuthMap]
|
||||
* @param {AbortSignal} [object.signal]
|
||||
* @param {Pick<Agent, 'id' | 'provider' | 'model'>} [params.agent]
|
||||
* @param {string} [params.model]
|
||||
* @param {EModelEndpoint} [params.endpoint]
|
||||
@@ -142,9 +140,7 @@ const loadTools = async ({
|
||||
user,
|
||||
agent,
|
||||
model,
|
||||
signal,
|
||||
endpoint,
|
||||
userMCPAuthMap,
|
||||
tools = [],
|
||||
options = {},
|
||||
functions = true,
|
||||
@@ -243,7 +239,6 @@ const loadTools = async ({
|
||||
/** @type {Record<string, string>} */
|
||||
const toolContextMap = {};
|
||||
const cachedTools = (await getCachedTools({ userId: user, includeGlobal: true })) ?? {};
|
||||
const requestedMCPTools = {};
|
||||
|
||||
for (const tool of tools) {
|
||||
if (tool === Tools.execute_code) {
|
||||
@@ -311,45 +306,14 @@ Current Date & Time: ${replaceSpecialVars({ text: '{{iso_datetime}}' })}
|
||||
};
|
||||
continue;
|
||||
} else if (tool && cachedTools && mcpToolPattern.test(tool)) {
|
||||
const [toolName, serverName] = tool.split(Constants.mcp_delimiter);
|
||||
if (toolName === Constants.mcp_server) {
|
||||
/** Placeholder used for UI purposes */
|
||||
continue;
|
||||
}
|
||||
if (serverName && options.req?.config?.mcpConfig?.[serverName] == null) {
|
||||
logger.warn(
|
||||
`MCP server "${serverName}" for "${toolName}" tool is not configured${agent?.id != null && agent.id ? ` but attached to "${agent.id}"` : ''}`,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
if (toolName === Constants.mcp_all) {
|
||||
const currentMCPGenerator = async (index) =>
|
||||
createMCPTools({
|
||||
req: options.req,
|
||||
res: options.res,
|
||||
index,
|
||||
serverName,
|
||||
userMCPAuthMap,
|
||||
model: agent?.model ?? model,
|
||||
provider: agent?.provider ?? endpoint,
|
||||
signal,
|
||||
});
|
||||
requestedMCPTools[serverName] = [currentMCPGenerator];
|
||||
continue;
|
||||
}
|
||||
const currentMCPGenerator = async (index) =>
|
||||
requestedTools[tool] = async () =>
|
||||
createMCPTool({
|
||||
index,
|
||||
req: options.req,
|
||||
res: options.res,
|
||||
toolKey: tool,
|
||||
userMCPAuthMap,
|
||||
model: agent?.model ?? model,
|
||||
provider: agent?.provider ?? endpoint,
|
||||
signal,
|
||||
});
|
||||
requestedMCPTools[serverName] = requestedMCPTools[serverName] || [];
|
||||
requestedMCPTools[serverName].push(currentMCPGenerator);
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -389,34 +353,6 @@ Current Date & Time: ${replaceSpecialVars({ text: '{{iso_datetime}}' })}
|
||||
}
|
||||
|
||||
const loadedTools = (await Promise.all(toolPromises)).flatMap((plugin) => plugin || []);
|
||||
const mcpToolPromises = [];
|
||||
/** MCP server tools are initialized sequentially by server */
|
||||
let index = -1;
|
||||
for (const [serverName, generators] of Object.entries(requestedMCPTools)) {
|
||||
index++;
|
||||
for (const generator of generators) {
|
||||
try {
|
||||
if (generator && generators.length === 1) {
|
||||
mcpToolPromises.push(
|
||||
generator(index).catch((error) => {
|
||||
logger.error(`Error loading ${serverName} tools:`, error);
|
||||
return null;
|
||||
}),
|
||||
);
|
||||
continue;
|
||||
}
|
||||
const mcpTool = await generator(index);
|
||||
if (Array.isArray(mcpTool)) {
|
||||
loadedTools.push(...mcpTool);
|
||||
} else if (mcpTool) {
|
||||
loadedTools.push(mcpTool);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(`Error loading MCP tool for server ${serverName}:`, error);
|
||||
}
|
||||
}
|
||||
}
|
||||
loadedTools.push(...(await Promise.all(mcpToolPromises)).flatMap((plugin) => plugin || []));
|
||||
return { loadedTools, toolContextMap };
|
||||
};
|
||||
|
||||
|
||||

api/cache/cacheConfig.js (3 changes)
@@ -52,9 +52,6 @@ const cacheConfig = {
|
||||
REDIS_CONNECT_TIMEOUT: math(process.env.REDIS_CONNECT_TIMEOUT, 10000),
|
||||
/** Queue commands when disconnected */
|
||||
REDIS_ENABLE_OFFLINE_QUEUE: isEnabled(process.env.REDIS_ENABLE_OFFLINE_QUEUE ?? 'true'),
|
||||
/** flag to modify redis connection by adding dnsLookup this is required when connecting to elasticache for ioredis
|
||||
* see "Special Note: Aws Elasticache Clusters with TLS" on this webpage: https://www.npmjs.com/package/ioredis **/
|
||||
REDIS_USE_ALTERNATIVE_DNS_LOOKUP: isEnabled(process.env.REDIS_USE_ALTERNATIVE_DNS_LOOKUP),
|
||||
/** Enable redis cluster without the need of multiple URIs */
|
||||
USE_REDIS_CLUSTER: isEnabled(process.env.USE_REDIS_CLUSTER ?? 'false'),
|
||||
CI: isEnabled(process.env.CI),
|
||||
|
||||

api/cache/getLogStores.js (1 change)
@@ -31,6 +31,7 @@ const namespaces = {
|
||||
[CacheKeys.SAML_SESSION]: sessionCache(CacheKeys.SAML_SESSION),
|
||||
|
||||
[CacheKeys.ROLES]: standardCache(CacheKeys.ROLES),
|
||||
[CacheKeys.MCP_TOOLS]: standardCache(CacheKeys.MCP_TOOLS),
|
||||
[CacheKeys.CONFIG_STORE]: standardCache(CacheKeys.CONFIG_STORE),
|
||||
[CacheKeys.STATIC_CONFIG]: standardCache(CacheKeys.STATIC_CONFIG),
|
||||
[CacheKeys.PENDING_REQ]: standardCache(CacheKeys.PENDING_REQ),
|
||||
|
||||

api/cache/redisClients.js (3 changes)
@@ -53,9 +53,6 @@ if (cacheConfig.USE_REDIS) {
|
||||
: new IoRedis.Cluster(
|
||||
urls.map((url) => ({ host: url.hostname, port: parseInt(url.port, 10) || 6379 })),
|
||||
{
|
||||
...(cacheConfig.REDIS_USE_ALTERNATIVE_DNS_LOOKUP
|
||||
? { dnsLookup: (address, callback) => callback(null, address) }
|
||||
: {}),
|
||||
redisOptions,
|
||||
clusterRetryStrategy: (times) => {
|
||||
if (
|
||||
|
||||
@@ -2,7 +2,7 @@ const mongoose = require('mongoose');
|
||||
const crypto = require('node:crypto');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { ResourceType, SystemRoles, Tools, actionDelimiter } = require('librechat-data-provider');
|
||||
const { GLOBAL_PROJECT_NAME, EPHEMERAL_AGENT_ID, mcp_all, mcp_delimiter } =
|
||||
const { GLOBAL_PROJECT_NAME, EPHEMERAL_AGENT_ID, mcp_delimiter } =
|
||||
require('librechat-data-provider').Constants;
|
||||
const {
|
||||
removeAgentFromAllProjects,
|
||||
@@ -78,7 +78,6 @@ const loadEphemeralAgent = async ({ req, agent_id, endpoint, model_parameters: _
|
||||
tools.push(Tools.web_search);
|
||||
}
|
||||
|
||||
const addedServers = new Set();
|
||||
if (mcpServers.size > 0) {
|
||||
for (const toolName of Object.keys(availableTools)) {
|
||||
if (!toolName.includes(mcp_delimiter)) {
|
||||
@@ -86,17 +85,9 @@ const loadEphemeralAgent = async ({ req, agent_id, endpoint, model_parameters: _
|
||||
}
|
||||
const mcpServer = toolName.split(mcp_delimiter)?.[1];
|
||||
if (mcpServer && mcpServers.has(mcpServer)) {
|
||||
addedServers.add(mcpServer);
|
||||
tools.push(toolName);
|
||||
}
|
||||
}
|
||||
|
||||
for (const mcpServer of mcpServers) {
|
||||
if (addedServers.has(mcpServer)) {
|
||||
continue;
|
||||
}
|
||||
tools.push(`${mcp_all}${mcp_delimiter}${mcpServer}`);
|
||||
}
|
||||
}
|
||||
|
||||
const instructions = req.body.promptPrefix;
|
||||
@@ -681,7 +672,7 @@ const getListAgents = async (searchParameter) => {
|
||||
* This function also updates the corresponding projects to include or exclude the agent ID.
|
||||
*
|
||||
* @param {Object} params - Parameters for updating the agent's projects.
|
||||
* @param {IUser} params.user - Parameters for updating the agent's projects.
|
||||
* @param {MongoUser} params.user - Parameters for updating the agent's projects.
|
||||
* @param {string} params.agentId - The ID of the agent to update.
|
||||
* @param {string[]} [params.projectIds] - Array of project IDs to add to the agent.
|
||||
* @param {string[]} [params.removeProjectIds] - Array of project IDs to remove from the agent.
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { createTempChatExpirationDate } = require('@librechat/api');
|
||||
const { getAppConfig } = require('~/server/services/Config/app');
|
||||
const { getMessages, deleteMessages } = require('./Message');
|
||||
const { Conversation } = require('~/db/models');
|
||||
|
||||
@@ -101,7 +102,9 @@ module.exports = {
|
||||
|
||||
if (req?.body?.isTemporary) {
|
||||
try {
|
||||
const appConfig = req.config;
|
||||
const appConfig = await getAppConfig({
|
||||
role: req.user.role,
|
||||
});
|
||||
update.expiredAt = createTempChatExpirationDate(appConfig?.interfaceConfig);
|
||||
} catch (err) {
|
||||
logger.error('Error creating temporary chat expiration date:', err);
|
||||
|
||||
@@ -15,6 +15,7 @@ const {
|
||||
} = require('./Conversation');
|
||||
jest.mock('~/server/services/Config/app');
|
||||
jest.mock('./Message');
|
||||
const { getAppConfig } = require('~/server/services/Config/app');
|
||||
const { getMessages, deleteMessages } = require('./Message');
|
||||
|
||||
const { Conversation } = require('~/db/models');
|
||||
@@ -49,11 +50,6 @@ describe('Conversation Operations', () => {
|
||||
mockReq = {
|
||||
user: { id: 'user123' },
|
||||
body: {},
|
||||
config: {
|
||||
interfaceConfig: {
|
||||
temporaryChatRetention: 24, // Default 24 hours
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
mockConversationData = {
|
||||
@@ -123,7 +119,11 @@ describe('Conversation Operations', () => {
|
||||
describe('isTemporary conversation handling', () => {
|
||||
it('should save a conversation with expiredAt when isTemporary is true', async () => {
|
||||
// Mock app config with 24 hour retention
|
||||
mockReq.config.interfaceConfig.temporaryChatRetention = 24;
|
||||
getAppConfig.mockResolvedValue({
|
||||
interfaceConfig: {
|
||||
temporaryChatRetention: 24,
|
||||
},
|
||||
});
|
||||
|
||||
mockReq.body = { isTemporary: true };
|
||||
|
||||
@@ -168,7 +168,11 @@ describe('Conversation Operations', () => {
|
||||
|
||||
it('should use custom retention period from config', async () => {
|
||||
// Mock app config with 48 hour retention
|
||||
mockReq.config.interfaceConfig.temporaryChatRetention = 48;
|
||||
getAppConfig.mockResolvedValue({
|
||||
interfaceConfig: {
|
||||
temporaryChatRetention: 48,
|
||||
},
|
||||
});
|
||||
|
||||
mockReq.body = { isTemporary: true };
|
||||
|
||||
@@ -191,7 +195,11 @@ describe('Conversation Operations', () => {
|
||||
|
||||
it('should handle minimum retention period (1 hour)', async () => {
|
||||
// Mock app config with less than minimum retention
|
||||
mockReq.config.interfaceConfig.temporaryChatRetention = 0.5; // Half hour - should be clamped to 1 hour
|
||||
getAppConfig.mockResolvedValue({
|
||||
interfaceConfig: {
|
||||
temporaryChatRetention: 0.5, // Half hour - should be clamped to 1 hour
|
||||
},
|
||||
});
|
||||
|
||||
mockReq.body = { isTemporary: true };
|
||||
|
||||
@@ -214,7 +222,11 @@ describe('Conversation Operations', () => {
|
||||
|
||||
it('should handle maximum retention period (8760 hours)', async () => {
|
||||
// Mock app config with more than maximum retention
|
||||
mockReq.config.interfaceConfig.temporaryChatRetention = 10000; // Should be clamped to 8760 hours
|
||||
getAppConfig.mockResolvedValue({
|
||||
interfaceConfig: {
|
||||
temporaryChatRetention: 10000, // Should be clamped to 8760 hours
|
||||
},
|
||||
});
|
||||
|
||||
mockReq.body = { isTemporary: true };
|
||||
|
||||
@@ -235,36 +247,22 @@ describe('Conversation Operations', () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle missing config gracefully', async () => {
|
||||
// Simulate missing config - should use default retention period
|
||||
delete mockReq.config;
|
||||
it('should handle getAppConfig errors gracefully', async () => {
|
||||
// Mock getAppConfig to throw an error
|
||||
getAppConfig.mockRejectedValue(new Error('Config service unavailable'));
|
||||
|
||||
mockReq.body = { isTemporary: true };
|
||||
|
||||
const beforeSave = new Date();
|
||||
const result = await saveConvo(mockReq, mockConversationData);
|
||||
const afterSave = new Date();
|
||||
|
||||
// Should still save the conversation with default retention period (30 days)
|
||||
// Should still save the conversation but with expiredAt as null
|
||||
expect(result.conversationId).toBe(mockConversationData.conversationId);
|
||||
expect(result.expiredAt).toBeDefined();
|
||||
expect(result.expiredAt).toBeInstanceOf(Date);
|
||||
|
||||
// Verify expiredAt is approximately 30 days in the future (720 hours)
|
||||
const expectedExpirationTime = new Date(beforeSave.getTime() + 720 * 60 * 60 * 1000);
|
||||
const actualExpirationTime = new Date(result.expiredAt);
|
||||
|
||||
expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
|
||||
expectedExpirationTime.getTime() - 1000,
|
||||
);
|
||||
expect(actualExpirationTime.getTime()).toBeLessThanOrEqual(
|
||||
new Date(afterSave.getTime() + 720 * 60 * 60 * 1000 + 1000).getTime(),
|
||||
);
|
||||
expect(result.expiredAt).toBeNull();
|
||||
});
|
||||
|
||||
it('should use default retention when config is not provided', async () => {
|
||||
// Mock getAppConfig to return empty config
|
||||
mockReq.config = {}; // Empty config
|
||||
getAppConfig.mockResolvedValue({});
|
||||
|
||||
mockReq.body = { isTemporary: true };
|
||||
|
||||
@@ -287,7 +285,11 @@ describe('Conversation Operations', () => {
|
||||
|
||||
it('should update expiredAt when saving existing temporary conversation', async () => {
|
||||
// First save a temporary conversation
|
||||
mockReq.config.interfaceConfig.temporaryChatRetention = 24;
|
||||
getAppConfig.mockResolvedValue({
|
||||
interfaceConfig: {
|
||||
temporaryChatRetention: 24,
|
||||
},
|
||||
});
|
||||
|
||||
mockReq.body = { isTemporary: true };
|
||||
const firstSave = await saveConvo(mockReq, mockConversationData);
|
||||
|
||||
@@ -211,7 +211,7 @@ describe('File Access Control', () => {
|
||||
expect(accessMap.get(fileIds[1])).toBe(false);
|
||||
});
|
||||
|
||||
it('should deny access when user only has VIEW permission and needs access for deletion', async () => {
|
||||
it('should deny access when user only has VIEW permission', async () => {
|
||||
const userId = new mongoose.Types.ObjectId();
|
||||
const authorId = new mongoose.Types.ObjectId();
|
||||
const agentId = uuidv4();
|
||||
@@ -263,71 +263,12 @@ describe('File Access Control', () => {
|
||||
role: SystemRoles.USER,
|
||||
fileIds,
|
||||
agentId,
|
||||
isDelete: true,
|
||||
});
|
||||
|
||||
// Should have no access to any files when only VIEW permission
|
||||
expect(accessMap.get(fileIds[0])).toBe(false);
|
||||
expect(accessMap.get(fileIds[1])).toBe(false);
|
||||
});
|
||||
|
||||
it('should grant access when user has VIEW permission', async () => {
|
||||
const userId = new mongoose.Types.ObjectId();
|
||||
const authorId = new mongoose.Types.ObjectId();
|
||||
const agentId = uuidv4();
|
||||
const fileIds = [uuidv4(), uuidv4()];
|
||||
|
||||
// Create users
|
||||
await User.create({
|
||||
_id: userId,
|
||||
email: 'user@example.com',
|
||||
emailVerified: true,
|
||||
provider: 'local',
|
||||
});
|
||||
|
||||
await User.create({
|
||||
_id: authorId,
|
||||
email: 'author@example.com',
|
||||
emailVerified: true,
|
||||
provider: 'local',
|
||||
});
|
||||
|
||||
// Create agent with files
|
||||
const agent = await createAgent({
|
||||
id: agentId,
|
||||
name: 'View-Only Agent',
|
||||
author: authorId,
|
||||
model: 'gpt-4',
|
||||
provider: 'openai',
|
||||
tool_resources: {
|
||||
file_search: {
|
||||
file_ids: fileIds,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Grant only VIEW permission to user on the agent
|
||||
await grantPermission({
|
||||
principalType: PrincipalType.USER,
|
||||
principalId: userId,
|
||||
resourceType: ResourceType.AGENT,
|
||||
resourceId: agent._id,
|
||||
accessRoleId: AccessRoleIds.AGENT_VIEWER,
|
||||
grantedBy: authorId,
|
||||
});
|
||||
|
||||
// Check access for files
|
||||
const { hasAccessToFilesViaAgent } = require('~/server/services/Files/permissions');
|
||||
const accessMap = await hasAccessToFilesViaAgent({
|
||||
userId: userId,
|
||||
role: SystemRoles.USER,
|
||||
fileIds,
|
||||
agentId,
|
||||
});
|
||||
|
||||
expect(accessMap.get(fileIds[0])).toBe(true);
|
||||
expect(accessMap.get(fileIds[1])).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getFiles with agent access control', () => {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
const { z } = require('zod');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { createTempChatExpirationDate } = require('@librechat/api');
|
||||
const { getAppConfig } = require('~/server/services/Config/app');
|
||||
const { Message } = require('~/db/models');
|
||||
|
||||
const idSchema = z.string().uuid();
|
||||
@@ -10,7 +11,7 @@ const idSchema = z.string().uuid();
|
||||
*
|
||||
* @async
|
||||
* @function saveMessage
|
||||
* @param {ServerRequest} req - The request object containing user information.
|
||||
* @param {Express.Request} req - The request object containing user information.
|
||||
* @param {Object} params - The message data object.
|
||||
* @param {string} params.endpoint - The endpoint where the message originated.
|
||||
* @param {string} params.iconURL - The URL of the sender's icon.
|
||||
@@ -56,7 +57,9 @@ async function saveMessage(req, params, metadata) {
|
||||
|
||||
if (req?.body?.isTemporary) {
|
||||
try {
|
||||
const appConfig = req.config;
|
||||
const appConfig = await getAppConfig({
|
||||
role: req.user.role,
|
||||
});
|
||||
update.expiredAt = createTempChatExpirationDate(appConfig?.interfaceConfig);
|
||||
} catch (err) {
|
||||
logger.error('Error creating temporary chat expiration date:', err);
|
||||
|
||||
@@ -14,6 +14,7 @@ const {
|
||||
} = require('./Message');
|
||||
|
||||
jest.mock('~/server/services/Config/app');
|
||||
const { getAppConfig } = require('~/server/services/Config/app');
|
||||
|
||||
/**
|
||||
* @type {import('mongoose').Model<import('@librechat/data-schemas').IMessage>}
|
||||
@@ -43,11 +44,6 @@ describe('Message Operations', () => {
|
||||
|
||||
mockReq = {
|
||||
user: { id: 'user123' },
|
||||
config: {
|
||||
interfaceConfig: {
|
||||
temporaryChatRetention: 24, // Default 24 hours
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
mockMessageData = {
|
||||
@@ -331,7 +327,11 @@ describe('Message Operations', () => {
|
||||
|
||||
it('should save a message with expiredAt when isTemporary is true', async () => {
|
||||
// Mock app config with 24 hour retention
|
||||
mockReq.config.interfaceConfig.temporaryChatRetention = 24;
|
||||
getAppConfig.mockResolvedValue({
|
||||
interfaceConfig: {
|
||||
temporaryChatRetention: 24,
|
||||
},
|
||||
});
|
||||
|
||||
mockReq.body = { isTemporary: true };
|
||||
|
||||
@@ -376,7 +376,11 @@ describe('Message Operations', () => {
|
||||
|
||||
it('should use custom retention period from config', async () => {
|
||||
// Mock app config with 48 hour retention
|
||||
mockReq.config.interfaceConfig.temporaryChatRetention = 48;
|
||||
getAppConfig.mockResolvedValue({
|
||||
interfaceConfig: {
|
||||
temporaryChatRetention: 48,
|
||||
},
|
||||
});
|
||||
|
||||
mockReq.body = { isTemporary: true };
|
||||
|
||||
@@ -399,7 +403,11 @@ describe('Message Operations', () => {
|
||||
|
||||
it('should handle minimum retention period (1 hour)', async () => {
|
||||
// Mock app config with less than minimum retention
|
||||
mockReq.config.interfaceConfig.temporaryChatRetention = 0.5; // Half hour - should be clamped to 1 hour
|
||||
getAppConfig.mockResolvedValue({
|
||||
interfaceConfig: {
|
||||
temporaryChatRetention: 0.5, // Half hour - should be clamped to 1 hour
|
||||
},
|
||||
});
|
||||
|
||||
mockReq.body = { isTemporary: true };
|
||||
|
||||
@@ -422,7 +430,11 @@ describe('Message Operations', () => {
|
||||
|
||||
it('should handle maximum retention period (8760 hours)', async () => {
|
||||
// Mock app config with more than maximum retention
|
||||
mockReq.config.interfaceConfig.temporaryChatRetention = 10000; // Should be clamped to 8760 hours
|
||||
getAppConfig.mockResolvedValue({
|
||||
interfaceConfig: {
|
||||
temporaryChatRetention: 10000, // Should be clamped to 8760 hours
|
||||
},
|
||||
});
|
||||
|
||||
mockReq.body = { isTemporary: true };
|
||||
|
||||
@@ -443,36 +455,22 @@ describe('Message Operations', () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle missing config gracefully', async () => {
|
||||
// Simulate missing config - should use default retention period
|
||||
delete mockReq.config;
|
||||
it('should handle getAppConfig errors gracefully', async () => {
|
||||
// Mock getAppConfig to throw an error
|
||||
getAppConfig.mockRejectedValue(new Error('Config service unavailable'));
|
||||
|
||||
mockReq.body = { isTemporary: true };
|
||||
|
||||
const beforeSave = new Date();
|
||||
const result = await saveMessage(mockReq, mockMessageData);
|
||||
const afterSave = new Date();
|
||||
|
||||
// Should still save the message with default retention period (30 days)
|
||||
// Should still save the message but with expiredAt as null
|
||||
expect(result.messageId).toBe('msg123');
|
||||
expect(result.expiredAt).toBeDefined();
|
||||
expect(result.expiredAt).toBeInstanceOf(Date);
|
||||
|
||||
// Verify expiredAt is approximately 30 days in the future (720 hours)
|
||||
const expectedExpirationTime = new Date(beforeSave.getTime() + 720 * 60 * 60 * 1000);
|
||||
const actualExpirationTime = new Date(result.expiredAt);
|
||||
|
||||
expect(actualExpirationTime.getTime()).toBeGreaterThanOrEqual(
|
||||
expectedExpirationTime.getTime() - 1000,
|
||||
);
|
||||
expect(actualExpirationTime.getTime()).toBeLessThanOrEqual(
|
||||
new Date(afterSave.getTime() + 720 * 60 * 60 * 1000 + 1000).getTime(),
|
||||
);
|
||||
expect(result.expiredAt).toBeNull();
|
||||
});
|
||||
|
||||
it('should use default retention when config is not provided', async () => {
|
||||
// Mock getAppConfig to return empty config
|
||||
mockReq.config = {}; // Empty config
|
||||
getAppConfig.mockResolvedValue({});
|
||||
|
||||
mockReq.body = { isTemporary: true };
|
||||
|
||||
@@ -495,7 +493,11 @@ describe('Message Operations', () => {
|
||||
|
||||
it('should not update expiredAt on message update', async () => {
|
||||
// First save a temporary message
|
||||
mockReq.config.interfaceConfig.temporaryChatRetention = 24;
|
||||
getAppConfig.mockResolvedValue({
|
||||
interfaceConfig: {
|
||||
temporaryChatRetention: 24,
|
||||
},
|
||||
});
|
||||
|
||||
mockReq.body = { isTemporary: true };
|
||||
const savedMessage = await saveMessage(mockReq, mockMessageData);
|
||||
@@ -518,7 +520,11 @@ describe('Message Operations', () => {
|
||||
|
||||
it('should preserve expiredAt when saving existing temporary message', async () => {
|
||||
// First save a temporary message
|
||||
mockReq.config.interfaceConfig.temporaryChatRetention = 24;
|
||||
getAppConfig.mockResolvedValue({
|
||||
interfaceConfig: {
|
||||
temporaryChatRetention: 24,
|
||||
},
|
||||
});
|
||||
|
||||
mockReq.body = { isTemporary: true };
|
||||
const firstSave = await saveMessage(mockReq, mockMessageData);
|
||||
|
||||
@@ -269,7 +269,7 @@ async function getListPromptGroupsByAccess({
|
||||
const baseQuery = { ...otherParams, _id: { $in: accessibleIds } };
|
||||
|
||||
// Add cursor condition
|
||||
if (after && typeof after === 'string' && after !== 'undefined' && after !== 'null') {
|
||||
if (after) {
|
||||
try {
|
||||
const cursor = JSON.parse(Buffer.from(after, 'base64').toString('utf8'));
|
||||
const { updatedAt, _id } = cursor;
|
||||
|
||||
@@ -118,7 +118,7 @@ const addIntervalToDate = (date, value, unit) => {
|
||||
* @async
|
||||
* @function
|
||||
* @param {Object} params - The function parameters.
|
||||
* @param {ServerRequest} params.req - The Express request object.
|
||||
* @param {Express.Request} params.req - The Express request object.
|
||||
* @param {Express.Response} params.res - The Express response object.
|
||||
* @param {Object} params.txData - The transaction data.
|
||||
* @param {string} params.txData.user - The user ID or identifier.
|
||||
|
||||
@@ -1,9 +1,47 @@
|
||||
const mongoose = require('mongoose');
|
||||
const { buildTree } = require('librechat-data-provider');
|
||||
const { MongoMemoryServer } = require('mongodb-memory-server');
|
||||
const { getMessages, bulkSaveMessages } = require('./Message');
|
||||
const { Message } = require('~/db/models');
|
||||
|
||||
// Original version of buildTree function
|
||||
function buildTree({ messages, fileMap }) {
|
||||
if (messages === null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const messageMap = {};
|
||||
const rootMessages = [];
|
||||
const childrenCount = {};
|
||||
|
||||
messages.forEach((message) => {
|
||||
const parentId = message.parentMessageId ?? '';
|
||||
childrenCount[parentId] = (childrenCount[parentId] || 0) + 1;
|
||||
|
||||
const extendedMessage = {
|
||||
...message,
|
||||
children: [],
|
||||
depth: 0,
|
||||
siblingIndex: childrenCount[parentId] - 1,
|
||||
};
|
||||
|
||||
if (message.files && fileMap) {
|
||||
extendedMessage.files = message.files.map((file) => fileMap[file.file_id ?? ''] ?? file);
|
||||
}
|
||||
|
||||
messageMap[message.messageId] = extendedMessage;
|
||||
|
||||
const parentMessage = messageMap[parentId];
|
||||
if (parentMessage) {
|
||||
parentMessage.children.push(extendedMessage);
|
||||
extendedMessage.depth = parentMessage.depth + 1;
|
||||
} else {
|
||||
rootMessages.push(extendedMessage);
|
||||
}
|
||||
});
|
||||
|
||||
return rootMessages;
|
||||
}
|
||||
|
||||
let mongod;
|
||||
beforeAll(async () => {
|
||||
mongod = await MongoMemoryServer.create();
|
||||
|
||||
@@ -3,7 +3,7 @@ const bcrypt = require('bcryptjs');
|
||||
/**
|
||||
* Compares the provided password with the user's password.
|
||||
*
|
||||
* @param {IUser} user - The user to compare the password for.
|
||||
* @param {MongoUser} user - The user to compare the password for.
|
||||
* @param {string} candidatePassword - The password to test against the user's password.
|
||||
* @returns {Promise<boolean>} A promise that resolves to a boolean indicating if the password matches.
|
||||
*/
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@librechat/backend",
|
||||
"version": "v0.8.0-rc3",
|
||||
"version": "v0.8.0-rc2",
|
||||
"description": "",
|
||||
"scripts": {
|
||||
"start": "echo 'please run this from the root directory'",
|
||||
@@ -44,12 +44,12 @@
|
||||
"@googleapis/youtube": "^20.0.0",
|
||||
"@keyv/redis": "^4.3.3",
|
||||
"@langchain/community": "^0.3.47",
|
||||
"@langchain/core": "^0.3.72",
|
||||
"@langchain/core": "^0.3.62",
|
||||
"@langchain/google-genai": "^0.2.13",
|
||||
"@langchain/google-vertexai": "^0.2.13",
|
||||
"@langchain/openai": "^0.5.18",
|
||||
"@langchain/textsplitters": "^0.1.0",
|
||||
"@librechat/agents": "^3.0.0-rc10",
|
||||
"@librechat/agents": "^2.4.76",
|
||||
"@librechat/api": "*",
|
||||
"@librechat/data-schemas": "*",
|
||||
"@microsoft/microsoft-graph-client": "^3.0.7",
|
||||
@@ -97,6 +97,7 @@
|
||||
"nodemailer": "^6.9.15",
|
||||
"ollama": "^0.5.0",
|
||||
"openai": "^5.10.1",
|
||||
"openai-chat-tokens": "^0.2.8",
|
||||
"openid-client": "^6.5.0",
|
||||
"passport": "^0.6.0",
|
||||
"passport-apple": "^2.0.2",
|
||||
|
||||

api/server/controllers/OverrideController.js (new file, 27 lines)
@@ -0,0 +1,27 @@
|
||||
const { CacheKeys } = require('librechat-data-provider');
|
||||
const { loadOverrideConfig } = require('~/server/services/Config');
|
||||
const { getLogStores } = require('~/cache');
|
||||
|
||||
async function overrideController(req, res) {
|
||||
const cache = getLogStores(CacheKeys.CONFIG_STORE);
|
||||
let overrideConfig = await cache.get(CacheKeys.OVERRIDE_CONFIG);
|
||||
if (overrideConfig) {
|
||||
res.send(overrideConfig);
|
||||
return;
|
||||
} else if (overrideConfig === false) {
|
||||
res.send(false);
|
||||
return;
|
||||
}
|
||||
overrideConfig = await loadOverrideConfig();
|
||||
const { endpointsConfig, modelsConfig } = overrideConfig;
|
||||
if (endpointsConfig) {
|
||||
await cache.set(CacheKeys.ENDPOINT_CONFIG, endpointsConfig);
|
||||
}
|
||||
if (modelsConfig) {
|
||||
await cache.set(CacheKeys.MODELS_CONFIG, modelsConfig);
|
||||
}
|
||||
await cache.set(CacheKeys.OVERRIDE_CONFIG, overrideConfig);
|
||||
res.send(JSON.stringify(overrideConfig));
|
||||
}
|
||||
|
||||
module.exports = overrideController;
|
||||
@@ -4,13 +4,11 @@ const {
|
||||
getToolkitKey,
|
||||
checkPluginAuth,
|
||||
filterUniquePlugins,
|
||||
convertMCPToolToPlugin,
|
||||
convertMCPToolsToPlugins,
|
||||
} = require('@librechat/api');
|
||||
const { getCachedTools, setCachedTools, mergeUserTools } = require('~/server/services/Config');
|
||||
const { getCachedTools, getAppConfig } = require('~/server/services/Config');
|
||||
const { availableTools, toolkits } = require('~/app/clients/tools');
|
||||
const { getAppConfig } = require('~/server/services/Config');
|
||||
const { getMCPManager } = require('~/config');
|
||||
const { getMCPManager, getFlowStateManager } = require('~/config');
|
||||
const { getLogStores } = require('~/cache');
|
||||
|
||||
const getAvailablePluginsController = async (req, res) => {
|
||||
@@ -25,7 +23,6 @@ const getAvailablePluginsController = async (req, res) => {
|
||||
const appConfig = await getAppConfig({ role: req.user?.role });
|
||||
/** @type {{ filteredTools: string[], includedTools: string[] }} */
|
||||
const { filteredTools = [], includedTools = [] } = appConfig;
|
||||
/** @type {import('@librechat/api').LCManifestTool[]} */
|
||||
const pluginManifest = availableTools;
|
||||
|
||||
const uniquePlugins = filterUniquePlugins(pluginManifest);
|
||||
@@ -51,6 +48,45 @@ const getAvailablePluginsController = async (req, res) => {
|
||||
}
|
||||
};
|
||||
|
||||
function createServerToolsCallback() {
|
||||
/**
|
||||
* @param {string} serverName
|
||||
* @param {TPlugin[] | null} serverTools
|
||||
*/
|
||||
return async function (serverName, serverTools) {
|
||||
try {
|
||||
const mcpToolsCache = getLogStores(CacheKeys.MCP_TOOLS);
|
||||
if (!serverName || !mcpToolsCache) {
|
||||
return;
|
||||
}
|
||||
await mcpToolsCache.set(serverName, serverTools);
|
||||
logger.debug(`MCP tools for ${serverName} added to cache.`);
|
||||
} catch (error) {
|
||||
logger.error('Error retrieving MCP tools from cache:', error);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function createGetServerTools() {
|
||||
/**
|
||||
* Retrieves cached server tools
|
||||
* @param {string} serverName
|
||||
* @returns {Promise<TPlugin[] | null>}
|
||||
*/
|
||||
return async function (serverName) {
|
||||
try {
|
||||
const mcpToolsCache = getLogStores(CacheKeys.MCP_TOOLS);
|
||||
if (!mcpToolsCache) {
|
||||
return null;
|
||||
}
|
||||
return await mcpToolsCache.get(serverName);
|
||||
} catch (error) {
|
||||
logger.error('Error retrieving MCP tools from cache:', error);
|
||||
return null;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves and returns a list of available tools, either from a cache or by reading a plugin manifest file.
|
||||
*
|
||||
@@ -66,71 +102,44 @@ const getAvailablePluginsController = async (req, res) => {
|
||||
const getAvailableTools = async (req, res) => {
|
||||
try {
|
||||
const userId = req.user?.id;
|
||||
if (!userId) {
|
||||
logger.warn('[getAvailableTools] User ID not found in request');
|
||||
return res.status(401).json({ message: 'Unauthorized' });
|
||||
}
|
||||
const appConfig = await getAppConfig();
|
||||
const cache = getLogStores(CacheKeys.CONFIG_STORE);
|
||||
const cachedToolsArray = await cache.get(CacheKeys.TOOLS);
|
||||
const cachedUserTools = await getCachedTools({ userId });
|
||||
|
||||
const appConfig = req.config ?? (await getAppConfig({ role: req.user?.role }));
|
||||
const mcpManager = getMCPManager();
|
||||
const userPlugins = convertMCPToolsToPlugins({ functionTools: cachedUserTools, mcpManager });
|
||||
|
||||
/** @type {TPlugin[]} */
|
||||
let mcpPlugins;
|
||||
if (appConfig?.mcpConfig) {
|
||||
const mcpManager = getMCPManager();
|
||||
mcpPlugins =
|
||||
cachedUserTools != null
|
||||
? convertMCPToolsToPlugins({ functionTools: cachedUserTools, mcpManager })
|
||||
: undefined;
|
||||
}
|
||||
|
||||
if (
|
||||
cachedToolsArray != null &&
|
||||
(appConfig?.mcpConfig != null ? mcpPlugins != null && mcpPlugins.length > 0 : true)
|
||||
) {
|
||||
const dedupedTools = filterUniquePlugins([...(mcpPlugins ?? []), ...cachedToolsArray]);
|
||||
if (cachedToolsArray != null && userPlugins != null) {
|
||||
const dedupedTools = filterUniquePlugins([...userPlugins, ...cachedToolsArray]);
|
||||
res.status(200).json(dedupedTools);
|
||||
return;
|
||||
}
|
||||
|
||||
/** @type {Record<string, FunctionTool> | null} Get tool definitions to filter which tools are actually available */
|
||||
let toolDefinitions = await getCachedTools({ includeGlobal: true });
|
||||
let prelimCachedTools;
|
||||
|
||||
/** @type {import('@librechat/api').LCManifestTool[]} */
|
||||
let pluginManifest = availableTools;
|
||||
|
||||
if (appConfig?.mcpConfig != null) {
|
||||
try {
|
||||
const mcpManager = getMCPManager();
|
||||
const mcpTools = await mcpManager.getAllToolFunctions(userId);
|
||||
prelimCachedTools = prelimCachedTools ?? {};
|
||||
for (const [toolKey, toolData] of Object.entries(mcpTools)) {
|
||||
const plugin = convertMCPToolToPlugin({
|
||||
toolKey,
|
||||
toolData,
|
||||
mcpManager,
|
||||
});
|
||||
if (plugin) {
|
||||
pluginManifest.push(plugin);
|
||||
}
|
||||
prelimCachedTools[toolKey] = toolData;
|
||||
}
|
||||
await mergeUserTools({ userId, cachedUserTools, userTools: prelimCachedTools });
|
||||
const flowsCache = getLogStores(CacheKeys.FLOWS);
|
||||
const flowManager = flowsCache ? getFlowStateManager(flowsCache) : null;
|
||||
const serverToolsCallback = createServerToolsCallback();
|
||||
const getServerTools = createGetServerTools();
|
||||
const mcpTools = await mcpManager.loadManifestTools({
|
||||
flowManager,
|
||||
serverToolsCallback,
|
||||
getServerTools,
|
||||
});
|
||||
pluginManifest = [...mcpTools, ...pluginManifest];
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
'[getAvailableTools] Error loading MCP Tools, servers may still be initializing:',
|
||||
error,
|
||||
);
|
||||
}
|
||||
} else if (prelimCachedTools != null) {
|
||||
await setCachedTools(prelimCachedTools, { isGlobal: true });
|
||||
}
|
||||
|
||||
/** @type {TPlugin[]} Deduplicate and authenticate plugins */
|
||||
/** @type {TPlugin[]} */
|
||||
const uniquePlugins = filterUniquePlugins(pluginManifest);
|
||||
|
||||
const authenticatedPlugins = uniquePlugins.map((plugin) => {
|
||||
if (checkPluginAuth(plugin)) {
|
||||
return { ...plugin, authenticated: true };
|
||||
@@ -139,7 +148,8 @@ const getAvailableTools = async (req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
/** Filter plugins based on availability and add MCP-specific auth config */
|
||||
const toolDefinitions = (await getCachedTools({ includeGlobal: true })) || {};
|
||||
|
||||
const toolsOutput = [];
|
||||
for (const plugin of authenticatedPlugins) {
|
||||
const isToolDefined = toolDefinitions[plugin.pluginKey] !== undefined;
|
||||
@@ -155,36 +165,41 @@ const getAvailableTools = async (req, res) => {
|
||||
|
||||
const toolToAdd = { ...plugin };
|
||||
|
||||
if (plugin.pluginKey.includes(Constants.mcp_delimiter)) {
|
||||
const parts = plugin.pluginKey.split(Constants.mcp_delimiter);
|
||||
const serverName = parts[parts.length - 1];
|
||||
const serverConfig = appConfig?.mcpConfig?.[serverName];
|
||||
if (!plugin.pluginKey.includes(Constants.mcp_delimiter)) {
|
||||
toolsOutput.push(toolToAdd);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (serverConfig?.customUserVars) {
|
||||
const customVarKeys = Object.keys(serverConfig.customUserVars);
|
||||
if (customVarKeys.length === 0) {
|
||||
toolToAdd.authConfig = [];
|
||||
toolToAdd.authenticated = true;
|
||||
} else {
|
||||
toolToAdd.authConfig = Object.entries(serverConfig.customUserVars).map(
|
||||
([key, value]) => ({
|
||||
authField: key,
|
||||
label: value.title || key,
|
||||
description: value.description || '',
|
||||
}),
|
||||
);
|
||||
toolToAdd.authenticated = false;
|
||||
}
|
||||
}
|
||||
const parts = plugin.pluginKey.split(Constants.mcp_delimiter);
|
||||
const serverName = parts[parts.length - 1];
|
||||
const serverConfig = appConfig?.mcpConfig?.[serverName];
|
||||
|
||||
if (!serverConfig?.customUserVars) {
|
||||
toolsOutput.push(toolToAdd);
|
||||
continue;
|
||||
}
|
||||
|
||||
const customVarKeys = Object.keys(serverConfig.customUserVars);
|
||||
|
||||
if (customVarKeys.length === 0) {
|
||||
toolToAdd.authConfig = [];
|
||||
toolToAdd.authenticated = true;
|
||||
} else {
|
||||
toolToAdd.authConfig = Object.entries(serverConfig.customUserVars).map(([key, value]) => ({
|
||||
authField: key,
|
||||
label: value.title || key,
|
||||
description: value.description || '',
|
||||
}));
|
||||
toolToAdd.authenticated = false;
|
||||
}
|
||||
|
||||
toolsOutput.push(toolToAdd);
|
||||
}
|
||||
|
||||
const finalTools = filterUniquePlugins(toolsOutput);
|
||||
await cache.set(CacheKeys.TOOLS, finalTools);
|
||||
|
||||
const dedupedTools = filterUniquePlugins([...(mcpPlugins ?? []), ...finalTools]);
|
||||
const dedupedTools = filterUniquePlugins([...userPlugins, ...finalTools]);
|
||||
|
||||
res.status(200).json(dedupedTools);
|
||||
} catch (error) {
|
||||
logger.error('[getAvailableTools]', error);
|
||||
|
||||
@@ -6,26 +6,22 @@ jest.mock('@librechat/data-schemas', () => ({
logger: {
debug: jest.fn(),
error: jest.fn(),
warn: jest.fn(),
},
}));

jest.mock('~/server/services/Config', () => ({
getCachedTools: jest.fn(),
getAppConfig: jest.fn().mockResolvedValue({
filteredTools: [],
includedTools: [],
}),
setCachedTools: jest.fn(),
mergeUserTools: jest.fn(),
getAppConfig: jest.fn(),
}));

// loadAndFormatTools mock removed - no longer used in PluginController
jest.mock('~/server/services/ToolService', () => ({
getToolkitKey: jest.fn(),
}));

jest.mock('~/config', () => ({
getMCPManager: jest.fn(() => ({
getAllToolFunctions: jest.fn().mockResolvedValue({}),
getRawConfig: jest.fn().mockReturnValue({}),
loadManifestTools: jest.fn().mockResolvedValue([]),
getRawConfig: jest.fn(),
})),
getFlowStateManager: jest.fn(),
}));
@@ -39,87 +35,74 @@ jest.mock('~/cache', () => ({
getLogStores: jest.fn(),
}));

jest.mock('@librechat/api', () => ({
getToolkitKey: jest.fn(),
checkPluginAuth: jest.fn(),
filterUniquePlugins: jest.fn(),
convertMCPToolsToPlugins: jest.fn(),
}));

// Import the actual module with the function we want to test
const { getAvailableTools, getAvailablePluginsController } = require('./PluginController');
const {
filterUniquePlugins,
checkPluginAuth,
convertMCPToolsToPlugins,
getToolkitKey,
} = require('@librechat/api');

describe('PluginController', () => {
let mockReq, mockRes, mockCache;

beforeEach(() => {
jest.clearAllMocks();
mockReq = {
user: { id: 'test-user-id' },
config: {
filteredTools: [],
includedTools: [],
},
};
mockReq = { user: { id: 'test-user-id' } };
mockRes = { status: jest.fn().mockReturnThis(), json: jest.fn() };
mockCache = { get: jest.fn(), set: jest.fn() };
getLogStores.mockReturnValue(mockCache);

// Clear availableTools and toolkits arrays before each test
require('~/app/clients/tools').availableTools.length = 0;
require('~/app/clients/tools').toolkits.length = 0;

// Reset getCachedTools mock to ensure clean state
getCachedTools.mockReset();

// Reset getAppConfig mock to ensure clean state with default values
getAppConfig.mockReset();
getAppConfig.mockResolvedValue({
filteredTools: [],
includedTools: [],
});
});

describe('getAvailablePluginsController', () => {
beforeEach(() => {
getAppConfig.mockResolvedValue({
filteredTools: [],
includedTools: [],
});
});

it('should use filterUniquePlugins to remove duplicate plugins', async () => {
// Add plugins with duplicates to availableTools
const mockPlugins = [
{ name: 'Plugin1', pluginKey: 'key1', description: 'First' },
{ name: 'Plugin1', pluginKey: 'key1', description: 'First duplicate' },
{ name: 'Plugin2', pluginKey: 'key2', description: 'Second' },
];

require('~/app/clients/tools').availableTools.push(...mockPlugins);

mockCache.get.mockResolvedValue(null);

// Configure getAppConfig to return the expected config
getAppConfig.mockResolvedValueOnce({
filteredTools: [],
includedTools: [],
});
filterUniquePlugins.mockReturnValue(mockPlugins);
checkPluginAuth.mockReturnValue(true);

await getAvailablePluginsController(mockReq, mockRes);

expect(filterUniquePlugins).toHaveBeenCalled();
expect(mockRes.status).toHaveBeenCalledWith(200);
const responseData = mockRes.json.mock.calls[0][0];
// The real filterUniquePlugins should have removed the duplicate
expect(responseData).toHaveLength(2);
expect(responseData[0].pluginKey).toBe('key1');
expect(responseData[1].pluginKey).toBe('key2');
// The response includes authenticated: true for each plugin when checkPluginAuth returns true
expect(mockRes.json).toHaveBeenCalledWith([
{ name: 'Plugin1', pluginKey: 'key1', description: 'First', authenticated: true },
{ name: 'Plugin2', pluginKey: 'key2', description: 'Second', authenticated: true },
]);
});

it('should use checkPluginAuth to verify plugin authentication', async () => {
// checkPluginAuth returns false for plugins without authConfig
// so authenticated property won't be added
const mockPlugin = { name: 'Plugin1', pluginKey: 'key1', description: 'First' };

require('~/app/clients/tools').availableTools.push(mockPlugin);
mockCache.get.mockResolvedValue(null);

// Configure getAppConfig to return the expected config
getAppConfig.mockResolvedValueOnce({
filteredTools: [],
includedTools: [],
});
filterUniquePlugins.mockReturnValue([mockPlugin]);
checkPluginAuth.mockReturnValueOnce(true);

await getAvailablePluginsController(mockReq, mockRes);

expect(checkPluginAuth).toHaveBeenCalledWith(mockPlugin);
const responseData = mockRes.json.mock.calls[0][0];
// The real checkPluginAuth returns false for plugins without authConfig, so authenticated property is not added
expect(responseData[0].authenticated).toBeUndefined();
expect(responseData[0].authenticated).toBe(true);
});

it('should return cached plugins when available', async () => {
@@ -131,7 +114,8 @@ describe('PluginController', () => {

await getAvailablePluginsController(mockReq, mockRes);

// When cache is hit, we return immediately without processing
expect(filterUniquePlugins).not.toHaveBeenCalled();
expect(checkPluginAuth).not.toHaveBeenCalled();
expect(mockRes.json).toHaveBeenCalledWith(cachedPlugins);
});

@@ -141,14 +125,13 @@ describe('PluginController', () => {
{ name: 'Plugin2', pluginKey: 'key2', description: 'Second' },
];

require('~/app/clients/tools').availableTools.push(...mockPlugins);
mockCache.get.mockResolvedValue(null);

// Configure getAppConfig to return config with includedTools
getAppConfig.mockResolvedValueOnce({
getAppConfig.mockResolvedValue({
filteredTools: [],
includedTools: ['key1'],
});
mockCache.get.mockResolvedValue(null);
filterUniquePlugins.mockReturnValue(mockPlugins);
checkPluginAuth.mockReturnValue(false);

await getAvailablePluginsController(mockReq, mockRes);

@@ -162,126 +145,67 @@ describe('PluginController', () => {
it('should use convertMCPToolsToPlugins for user-specific MCP tools', async () => {
const mockUserTools = {
[`tool1${Constants.mcp_delimiter}server1`]: {
type: 'function',
function: {
name: `tool1${Constants.mcp_delimiter}server1`,
description: 'Tool 1',
parameters: { type: 'object', properties: {} },
},
function: { name: 'tool1', description: 'Tool 1' },
},
};
const mockConvertedPlugins = [
{
name: 'tool1',
pluginKey: `tool1${Constants.mcp_delimiter}server1`,
description: 'Tool 1',
},
];

mockCache.get.mockResolvedValue(null);
getCachedTools.mockResolvedValueOnce(mockUserTools);
mockReq.config = {
mcpConfig: {
server1: {},
},
paths: { structuredTools: '/mock/path' },
};

// Mock MCP manager to return empty tools initially (since getAllToolFunctions is called)
const mockMCPManager = {
getAllToolFunctions: jest.fn().mockResolvedValue({}),
getRawConfig: jest.fn().mockReturnValue({}),
};
require('~/config').getMCPManager.mockReturnValue(mockMCPManager);

// Mock second call to return tool definitions (includeGlobal: true)
getCachedTools.mockResolvedValueOnce(mockUserTools);
convertMCPToolsToPlugins.mockReturnValue(mockConvertedPlugins);
filterUniquePlugins.mockImplementation((plugins) => plugins);

await getAvailableTools(mockReq, mockRes);

expect(mockRes.status).toHaveBeenCalledWith(200);
const responseData = mockRes.json.mock.calls[0][0];
expect(responseData).toBeDefined();
expect(Array.isArray(responseData)).toBe(true);
expect(responseData.length).toBeGreaterThan(0);
const convertedTool = responseData.find(
(tool) => tool.pluginKey === `tool1${Constants.mcp_delimiter}server1`,
);
expect(convertedTool).toBeDefined();
// The real convertMCPToolsToPlugins extracts the name from the delimiter
expect(convertedTool.name).toBe('tool1');
expect(convertMCPToolsToPlugins).toHaveBeenCalledWith({
functionTools: mockUserTools,
mcpManager: expect.any(Object),
});
});

it('should use filterUniquePlugins to deduplicate combined tools', async () => {
const mockUserTools = {
'user-tool': {
type: 'function',
function: {
name: 'user-tool',
description: 'User tool',
parameters: { type: 'object', properties: {} },
},
},
};

const mockCachedPlugins = [
{ name: 'user-tool', pluginKey: 'user-tool', description: 'Duplicate user tool' },
const mockUserPlugins = [
{ name: 'UserTool', pluginKey: 'user-tool', description: 'User tool' },
];
const mockManifestPlugins = [
{ name: 'ManifestTool', pluginKey: 'manifest-tool', description: 'Manifest tool' },
];

mockCache.get.mockResolvedValue(mockCachedPlugins);
getCachedTools.mockResolvedValueOnce(mockUserTools);
mockReq.config = {
mcpConfig: null,
paths: { structuredTools: '/mock/path' },
};

// Mock second call to return tool definitions
getCachedTools.mockResolvedValueOnce(mockUserTools);
mockCache.get.mockResolvedValue(mockManifestPlugins);
getCachedTools.mockResolvedValueOnce({});
convertMCPToolsToPlugins.mockReturnValue(mockUserPlugins);
filterUniquePlugins.mockReturnValue([...mockUserPlugins, ...mockManifestPlugins]);

await getAvailableTools(mockReq, mockRes);

expect(mockRes.status).toHaveBeenCalledWith(200);
const responseData = mockRes.json.mock.calls[0][0];
expect(Array.isArray(responseData)).toBe(true);
// The real filterUniquePlugins should have deduplicated tools with same pluginKey
const userToolCount = responseData.filter((tool) => tool.pluginKey === 'user-tool').length;
expect(userToolCount).toBe(1);
// Should be called to deduplicate the combined array
expect(filterUniquePlugins).toHaveBeenLastCalledWith([
...mockUserPlugins,
...mockManifestPlugins,
]);
});

it('should use checkPluginAuth to verify authentication status', async () => {
// Add a plugin to availableTools that will be checked
const mockPlugin = {
name: 'Tool1',
pluginKey: 'tool1',
description: 'Tool 1',
// No authConfig means checkPluginAuth returns false
};

require('~/app/clients/tools').availableTools.push(mockPlugin);
const mockPlugin = { name: 'Tool1', pluginKey: 'tool1', description: 'Tool 1' };

mockCache.get.mockResolvedValue(null);
// First call returns null for user tools
getCachedTools.mockResolvedValueOnce(null);
mockReq.config = {
mcpConfig: null,
paths: { structuredTools: '/mock/path' },
};
getCachedTools.mockResolvedValue({});
convertMCPToolsToPlugins.mockReturnValue([]);
filterUniquePlugins.mockReturnValue([mockPlugin]);
checkPluginAuth.mockReturnValue(true);

// Second call (with includeGlobal: true) returns the tool definitions
getCachedTools.mockResolvedValueOnce({
tool1: {
type: 'function',
function: {
name: 'tool1',
description: 'Tool 1',
parameters: {},
},
},
});
// Mock getCachedTools second call to return tool definitions
getCachedTools.mockResolvedValueOnce({}).mockResolvedValueOnce({ tool1: true });

await getAvailableTools(mockReq, mockRes);

expect(mockRes.status).toHaveBeenCalledWith(200);
const responseData = mockRes.json.mock.calls[0][0];
expect(Array.isArray(responseData)).toBe(true);
const tool = responseData.find((t) => t.pluginKey === 'tool1');
expect(tool).toBeDefined();
// The real checkPluginAuth returns false for plugins without authConfig, so authenticated property is not added
expect(tool.authenticated).toBeUndefined();
expect(checkPluginAuth).toHaveBeenCalledWith(mockPlugin);
});

it('should use getToolkitKey for toolkit validation', async () => {
@@ -292,42 +216,21 @@ describe('PluginController', () => {
toolkit: true,
};

require('~/app/clients/tools').availableTools.push(mockToolkit);

// Mock toolkits to have a mapping
require('~/app/clients/tools').toolkits.push({
name: 'Toolkit1',
pluginKey: 'toolkit1',
tools: ['toolkit1_function'],
});

mockCache.get.mockResolvedValue(null);
// First call returns null for user tools
getCachedTools.mockResolvedValueOnce(null);
mockReq.config = {
mcpConfig: null,
paths: { structuredTools: '/mock/path' },
};
getCachedTools.mockResolvedValue({});
convertMCPToolsToPlugins.mockReturnValue([]);
filterUniquePlugins.mockReturnValue([mockToolkit]);
checkPluginAuth.mockReturnValue(false);
getToolkitKey.mockReturnValue('toolkit1');

// Second call (with includeGlobal: true) returns the tool definitions
getCachedTools.mockResolvedValueOnce({
toolkit1_function: {
type: 'function',
function: {
name: 'toolkit1_function',
description: 'Toolkit function',
parameters: {},
},
},
// Mock getCachedTools second call to return tool definitions
getCachedTools.mockResolvedValueOnce({}).mockResolvedValueOnce({
toolkit1_function: true,
});

await getAvailableTools(mockReq, mockRes);

expect(mockRes.status).toHaveBeenCalledWith(200);
const responseData = mockRes.json.mock.calls[0][0];
expect(Array.isArray(responseData)).toBe(true);
const toolkit = responseData.find((t) => t.pluginKey === 'toolkit1');
expect(toolkit).toBeDefined();
expect(getToolkitKey).toHaveBeenCalled();
});
});

@@ -337,40 +240,39 @@ describe('PluginController', () => {

const functionTools = {
[`test-tool${Constants.mcp_delimiter}test-server`]: {
type: 'function',
function: {
name: `test-tool${Constants.mcp_delimiter}test-server`,
description: 'A test tool',
parameters: { type: 'object', properties: {} },
},
function: { name: 'test-tool', description: 'A test tool' },
},
};

// Mock the MCP manager to return tools and server config
const mockConvertedPlugin = {
name: 'test-tool',
pluginKey: `test-tool${Constants.mcp_delimiter}test-server`,
description: 'A test tool',
icon: serverConfig?.iconPath,
authenticated: true,
authConfig: [],
};

// Mock the MCP manager to return the server config
const mockMCPManager = {
getAllToolFunctions: jest.fn().mockResolvedValue(functionTools),
loadManifestTools: jest.fn().mockResolvedValue([]),
getRawConfig: jest.fn().mockReturnValue(serverConfig),
};
require('~/config').getMCPManager.mockReturnValue(mockMCPManager);

// First call returns empty user tools
getCachedTools.mockResolvedValueOnce({});

// Mock getAppConfig to return the mcpConfig
mockReq.config = {
mcpConfig: {
'test-server': serverConfig,
},
};

// Second call (with includeGlobal: true) returns the tool definitions
getCachedTools.mockResolvedValueOnce(functionTools);
convertMCPToolsToPlugins.mockReturnValue([mockConvertedPlugin]);
filterUniquePlugins.mockImplementation((plugins) => plugins);
checkPluginAuth.mockReturnValue(true);
getToolkitKey.mockReturnValue(undefined);

getCachedTools.mockResolvedValueOnce({
[`test-tool${Constants.mcp_delimiter}test-server`]: true,
});

await getAvailableTools(mockReq, mockRes);
const responseData = mockRes.json.mock.calls[0][0];
return responseData.find(
(tool) => tool.pluginKey === `test-tool${Constants.mcp_delimiter}test-server`,
);
return responseData.find((tool) => tool.name === 'test-tool');
};

it('should set plugin.icon when iconPath is defined', async () => {
@@ -400,43 +302,46 @@ describe('PluginController', () => {
},
};

// Mock MCP tools returned by getAllToolFunctions
const mcpToolFunctions = {
[`tool1${Constants.mcp_delimiter}test-server`]: {
type: 'function',
function: {
name: `tool1${Constants.mcp_delimiter}test-server`,
description: 'Tool 1',
parameters: {},
},
// We need to test the actual flow where MCP manager tools are included
const mcpManagerTools = [
{
name: 'tool1',
pluginKey: `tool1${Constants.mcp_delimiter}test-server`,
description: 'Tool 1',
authenticated: true,
},
};
];

// Mock the MCP manager to return tools
const mockMCPManager = {
getAllToolFunctions: jest.fn().mockResolvedValue(mcpToolFunctions),
getRawConfig: jest.fn().mockReturnValue({
customUserVars: {
API_KEY: { title: 'API Key', description: 'Your API key' },
},
}),
loadManifestTools: jest.fn().mockResolvedValue(mcpManagerTools),
getRawConfig: jest.fn(),
};
require('~/config').getMCPManager.mockReturnValue(mockMCPManager);

mockCache.get.mockResolvedValue(null);
mockReq.config = appConfig;
getAppConfig.mockResolvedValue(appConfig);

// First call returns user tools (empty in this case)
getCachedTools.mockResolvedValueOnce({});

// Second call (with includeGlobal: true) returns tool definitions including our MCP tool
getCachedTools.mockResolvedValueOnce(mcpToolFunctions);
// Mock convertMCPToolsToPlugins to return empty array for user tools
convertMCPToolsToPlugins.mockReturnValue([]);

// Mock filterUniquePlugins to pass through
filterUniquePlugins.mockImplementation((plugins) => plugins || []);

// Mock checkPluginAuth
checkPluginAuth.mockReturnValue(true);

// Second call returns tool definitions
getCachedTools.mockResolvedValueOnce({
[`tool1${Constants.mcp_delimiter}test-server`]: true,
});

await getAvailableTools(mockReq, mockRes);

expect(mockRes.status).toHaveBeenCalledWith(200);
const responseData = mockRes.json.mock.calls[0][0];
expect(Array.isArray(responseData)).toBe(true);

// Find the MCP tool in the response
const mcpTool = responseData.find(
@@ -472,36 +377,24 @@ describe('PluginController', () => {

it('should handle null cachedTools and cachedUserTools', async () => {
mockCache.get.mockResolvedValue(null);
// First call returns null for user tools
getCachedTools.mockResolvedValueOnce(null);
mockReq.config = {
mcpConfig: null,
paths: { structuredTools: '/mock/path' },
};

// Mock MCP manager to return no tools
const mockMCPManager = {
getAllToolFunctions: jest.fn().mockResolvedValue({}),
getRawConfig: jest.fn().mockReturnValue({}),
};
require('~/config').getMCPManager.mockReturnValue(mockMCPManager);

// Second call (with includeGlobal: true) returns empty object instead of null
getCachedTools.mockResolvedValueOnce({});
getCachedTools.mockResolvedValue(null);
convertMCPToolsToPlugins.mockReturnValue(undefined);
filterUniquePlugins.mockImplementation((plugins) => plugins || []);

await getAvailableTools(mockReq, mockRes);

// Should handle null values gracefully
expect(mockRes.status).toHaveBeenCalledWith(200);
expect(mockRes.json).toHaveBeenCalledWith([]);
expect(convertMCPToolsToPlugins).toHaveBeenCalledWith({
functionTools: null,
mcpManager: expect.any(Object),
});
});

it('should handle when getCachedTools returns undefined', async () => {
mockCache.get.mockResolvedValue(null);
mockReq.config = {
mcpConfig: null,
paths: { structuredTools: '/mock/path' },
};
getCachedTools.mockResolvedValue(undefined);
convertMCPToolsToPlugins.mockReturnValue(undefined);
filterUniquePlugins.mockImplementation((plugins) => plugins || []);
checkPluginAuth.mockReturnValue(false);

// Mock getCachedTools to return undefined for both calls
getCachedTools.mockReset();
@@ -509,72 +402,36 @@ describe('PluginController', () => {

await getAvailableTools(mockReq, mockRes);

// Should handle undefined values gracefully
expect(mockRes.status).toHaveBeenCalledWith(200);
expect(mockRes.json).toHaveBeenCalledWith([]);
expect(convertMCPToolsToPlugins).toHaveBeenCalledWith({
functionTools: undefined,
mcpManager: expect.any(Object),
});
});

it('should handle `cachedToolsArray` and `mcpPlugins` both being defined', async () => {
it('should handle cachedToolsArray and userPlugins both being defined', async () => {
const cachedTools = [{ name: 'CachedTool', pluginKey: 'cached-tool', description: 'Cached' }];
// Use MCP delimiter for the user tool so convertMCPToolsToPlugins works
const userTools = {
[`user-tool${Constants.mcp_delimiter}server1`]: {
type: 'function',
function: {
name: `user-tool${Constants.mcp_delimiter}server1`,
description: 'User tool',
parameters: {},
},
},
'user-tool': { function: { name: 'user-tool', description: 'User tool' } },
};
const userPlugins = [{ name: 'UserTool', pluginKey: 'user-tool', description: 'User tool' }];

mockCache.get.mockResolvedValue(cachedTools);
getCachedTools.mockResolvedValueOnce(userTools);
mockReq.config = {
mcpConfig: {
server1: {},
},
paths: { structuredTools: '/mock/path' },
};

// Mock MCP manager to return empty tools initially
const mockMCPManager = {
getAllToolFunctions: jest.fn().mockResolvedValue({}),
getRawConfig: jest.fn().mockReturnValue({}),
};
require('~/config').getMCPManager.mockReturnValue(mockMCPManager);

// The controller expects a second call to getCachedTools
getCachedTools.mockResolvedValueOnce({
'cached-tool': { type: 'function', function: { name: 'cached-tool' } },
[`user-tool${Constants.mcp_delimiter}server1`]:
userTools[`user-tool${Constants.mcp_delimiter}server1`],
});
getCachedTools.mockResolvedValue(userTools);
convertMCPToolsToPlugins.mockReturnValue(userPlugins);
filterUniquePlugins.mockReturnValue([...userPlugins, ...cachedTools]);

await getAvailableTools(mockReq, mockRes);

expect(mockRes.status).toHaveBeenCalledWith(200);
const responseData = mockRes.json.mock.calls[0][0];
// Should have both cached and user tools
expect(responseData.length).toBeGreaterThanOrEqual(2);
expect(mockRes.json).toHaveBeenCalledWith([...userPlugins, ...cachedTools]);
});

it('should handle empty toolDefinitions object', async () => {
mockCache.get.mockResolvedValue(null);
// Reset getCachedTools to ensure clean state
getCachedTools.mockReset();
getCachedTools.mockResolvedValue({});
mockReq.config = {}; // No mcpConfig at all

// Ensure no plugins are available
require('~/app/clients/tools').availableTools.length = 0;

// Reset MCP manager to default state
const mockMCPManager = {
getAllToolFunctions: jest.fn().mockResolvedValue({}),
getRawConfig: jest.fn().mockReturnValue({}),
};
require('~/config').getMCPManager.mockReturnValue(mockMCPManager);
getCachedTools.mockResolvedValueOnce({}).mockResolvedValueOnce({});
convertMCPToolsToPlugins.mockReturnValue([]);
filterUniquePlugins.mockImplementation((plugins) => plugins || []);
checkPluginAuth.mockReturnValue(true);

await getAvailableTools(mockReq, mockRes);

@@ -583,8 +440,8 @@ describe('PluginController', () => {
});

it('should handle MCP tools without customUserVars', async () => {
const appConfig = {
mcpConfig: {
const customConfig = {
mcpServers: {
'test-server': {
// No customUserVars defined
},
@@ -593,59 +450,54 @@ describe('PluginController', () => {

const mockUserTools = {
[`tool1${Constants.mcp_delimiter}test-server`]: {
type: 'function',
function: {
name: `tool1${Constants.mcp_delimiter}test-server`,
description: 'Tool 1',
parameters: { type: 'object', properties: {} },
},
function: { name: 'tool1', description: 'Tool 1' },
},
};

// Mock the MCP manager to return the tools
// Mock the MCP manager to return server config without customUserVars
const mockMCPManager = {
getAllToolFunctions: jest.fn().mockResolvedValue(mockUserTools),
getRawConfig: jest.fn().mockReturnValue({
// No customUserVars defined
}),
loadManifestTools: jest.fn().mockResolvedValue([]),
getRawConfig: jest.fn().mockReturnValue(customConfig.mcpServers['test-server']),
};
require('~/config').getMCPManager.mockReturnValue(mockMCPManager);

mockCache.get.mockResolvedValue(null);
mockReq.config = appConfig;
// First call returns empty user tools
getCachedTools.mockResolvedValueOnce({});

// Second call (with includeGlobal: true) returns the tool definitions
getAppConfig.mockResolvedValue({
mcpConfig: customConfig.mcpServers,
filteredTools: [],
includedTools: [],
});
getCachedTools.mockResolvedValueOnce(mockUserTools);

// Ensure no plugins in availableTools for clean test
require('~/app/clients/tools').availableTools.length = 0;
const mockPlugin = {
name: 'tool1',
pluginKey: `tool1${Constants.mcp_delimiter}test-server`,
description: 'Tool 1',
authenticated: true,
authConfig: [],
};

convertMCPToolsToPlugins.mockReturnValue([mockPlugin]);
filterUniquePlugins.mockImplementation((plugins) => plugins);
checkPluginAuth.mockReturnValue(true);

getCachedTools.mockResolvedValueOnce({
[`tool1${Constants.mcp_delimiter}test-server`]: true,
});

await getAvailableTools(mockReq, mockRes);

expect(mockRes.status).toHaveBeenCalledWith(200);
const responseData = mockRes.json.mock.calls[0][0];
expect(Array.isArray(responseData)).toBe(true);
expect(responseData.length).toBeGreaterThan(0);

const mcpTool = responseData.find(
(tool) => tool.pluginKey === `tool1${Constants.mcp_delimiter}test-server`,
);

expect(mcpTool).toBeDefined();
expect(mcpTool.authenticated).toBe(true);
// The actual implementation sets authConfig to empty array when no customUserVars
expect(mcpTool.authConfig).toEqual([]);
expect(responseData[0].authenticated).toBe(true);
// The actual implementation doesn't set authConfig on tools without customUserVars
expect(responseData[0].authConfig).toEqual([]);
});

it('should handle undefined filteredTools and includedTools', async () => {
mockReq.config = {};
getAppConfig.mockResolvedValue({});
mockCache.get.mockResolvedValue(null);

// Configure getAppConfig to return config with undefined properties
// The controller will use default values [] for filteredTools and includedTools
getAppConfig.mockResolvedValueOnce({});
filterUniquePlugins.mockReturnValue([]);
checkPluginAuth.mockReturnValue(false);

await getAvailablePluginsController(mockReq, mockRes);

@@ -661,21 +513,15 @@ describe('PluginController', () => {
toolkit: true,
};

// No need to mock app.locals anymore as it's not used

// Add the toolkit to availableTools
require('~/app/clients/tools').availableTools.push(mockToolkit);

mockCache.get.mockResolvedValue(null);
// First call returns empty object
getCachedTools.mockResolvedValueOnce({});
mockReq.config = {
mcpConfig: null,
paths: { structuredTools: '/mock/path' },
};
getCachedTools.mockResolvedValue({});
convertMCPToolsToPlugins.mockReturnValue([]);
filterUniquePlugins.mockReturnValue([mockToolkit]);
checkPluginAuth.mockReturnValue(false);
getToolkitKey.mockReturnValue(undefined);

// Second call (with includeGlobal: true) returns empty object to avoid null reference error
getCachedTools.mockResolvedValueOnce({});
// Mock getCachedTools second call to return null
getCachedTools.mockResolvedValueOnce({}).mockResolvedValueOnce(null);

await getAvailableTools(mockReq, mockRes);

@@ -24,7 +24,7 @@ const { getMCPManager } = require('~/config');

const getUserController = async (req, res) => {
const appConfig = await getAppConfig({ role: req.user?.role });
/** @type {IUser} */
/** @type {MongoUser} */
const userData = req.user.toObject != null ? req.user.toObject() : { ...req.user };
/**
* These fields should not exist due to secure field selection, but deletion
@@ -187,7 +187,7 @@ const updateUserPluginsController = async (req, res) => {
// Extract server name from pluginKey (format: "mcp_<serverName>")
const serverName = pluginKey.replace(Constants.mcp_prefix, '');
logger.info(
`[updateUserPluginsController] Attempting disconnect of MCP server "${serverName}" for user ${user.id} after plugin auth update.`,
`[updateUserPluginsController] Disconnecting MCP server ${serverName} for user ${user.id} after plugin auth update for ${pluginKey}.`,
);
await mcpManager.disconnectUserConnection(user.id, serverName);
}

@@ -95,19 +95,6 @@ class ModelEndHandler {
}
}

/**
* @deprecated Agent Chain helper
* @param {string | undefined} [last_agent_id]
* @param {string | undefined} [langgraph_node]
* @returns {boolean}
*/
function checkIfLastAgent(last_agent_id, langgraph_node) {
if (!last_agent_id || !langgraph_node) {
return false;
}
return langgraph_node?.endsWith(last_agent_id);
}

/**
* Get default handlers for stream events.
* @param {Object} options - The options object.
@@ -138,7 +125,7 @@ function getDefaultHandlers({ res, aggregateContent, toolEndCallback, collectedU
handle: (event, data, metadata) => {
if (data?.stepDetails.type === StepTypes.TOOL_CALLS) {
sendEvent(res, { event, data });
} else if (checkIfLastAgent(metadata?.last_agent_id, metadata?.langgraph_node)) {
} else if (metadata?.last_agent_index === metadata?.agent_index) {
sendEvent(res, { event, data });
} else if (!metadata?.hide_sequential_outputs) {
sendEvent(res, { event, data });
@@ -167,7 +154,7 @@ function getDefaultHandlers({ res, aggregateContent, toolEndCallback, collectedU
handle: (event, data, metadata) => {
if (data?.delta.type === StepTypes.TOOL_CALLS) {
sendEvent(res, { event, data });
} else if (checkIfLastAgent(metadata?.last_agent_id, metadata?.langgraph_node)) {
} else if (metadata?.last_agent_index === metadata?.agent_index) {
sendEvent(res, { event, data });
} else if (!metadata?.hide_sequential_outputs) {
sendEvent(res, { event, data });
@@ -185,7 +172,7 @@ function getDefaultHandlers({ res, aggregateContent, toolEndCallback, collectedU
handle: (event, data, metadata) => {
if (data?.result != null) {
sendEvent(res, { event, data });
} else if (checkIfLastAgent(metadata?.last_agent_id, metadata?.langgraph_node)) {
} else if (metadata?.last_agent_index === metadata?.agent_index) {
sendEvent(res, { event, data });
} else if (!metadata?.hide_sequential_outputs) {
sendEvent(res, { event, data });
@@ -201,7 +188,7 @@ function getDefaultHandlers({ res, aggregateContent, toolEndCallback, collectedU
* @param {GraphRunnableConfig['configurable']} [metadata] The runnable metadata.
*/
handle: (event, data, metadata) => {
if (checkIfLastAgent(metadata?.last_agent_id, metadata?.langgraph_node)) {
if (metadata?.last_agent_index === metadata?.agent_index) {
sendEvent(res, { event, data });
} else if (!metadata?.hide_sequential_outputs) {
sendEvent(res, { event, data });
@@ -217,7 +204,7 @@ function getDefaultHandlers({ res, aggregateContent, toolEndCallback, collectedU
* @param {GraphRunnableConfig['configurable']} [metadata] The runnable metadata.
*/
handle: (event, data, metadata) => {
if (checkIfLastAgent(metadata?.last_agent_id, metadata?.langgraph_node)) {
if (metadata?.last_agent_index === metadata?.agent_index) {
sendEvent(res, { event, data });
} else if (!metadata?.hide_sequential_outputs) {
sendEvent(res, { event, data });
@@ -259,7 +246,6 @@ function createToolEndCallback({ req, res, artifactPromises }) {
const attachment = await processFileCitations({
user,
metadata,
appConfig: req.config,
toolArtifact: output.artifact,
toolCallId: output.tool_call_id,
});

@@ -3,17 +3,20 @@ const { logger } = require('@librechat/data-schemas');
const { DynamicStructuredTool } = require('@langchain/core/tools');
const { getBufferString, HumanMessage } = require('@langchain/core/messages');
const {
sendEvent,
createRun,
Tokenizer,
checkAccess,
resolveHeaders,
getBalanceConfig,
hasCustomUserVars,
getUserMCPAuthMap,
memoryInstructions,
formatContentStrings,
createMemoryProcessor,
} = require('@librechat/api');
const {
Callback,
Providers,
GraphEvents,
TitleMethod,
formatMessage,
formatAgentMessages,
@@ -32,13 +35,18 @@ const {
bedrockInputSchema,
removeNullishValues,
} = require('librechat-data-provider');
const {
findPluginAuthsByKeys,
getFormattedMemories,
deleteMemory,
setMemory,
} = require('~/models');
const { addCacheControl, createContextHandlers } = require('~/app/clients/prompts');
const { initializeAgent } = require('~/server/services/Endpoints/agents/agent');
const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
const { getFormattedMemories, deleteMemory, setMemory } = require('~/models');
const { checkCapability, getAppConfig } = require('~/server/services/Config');
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
const { getProviderConfig } = require('~/server/services/Endpoints');
const { createContextHandlers } = require('~/app/clients/prompts');
const { checkCapability } = require('~/server/services/Config');
const BaseClient = require('~/app/clients/BaseClient');
const { getRoleByName } = require('~/models/Role');
const { loadAgent } = require('~/models/Agent');
@@ -74,6 +82,8 @@ const payloadParser = ({ req, agent, endpoint }) => {
return req.body.endpointOption.model_parameters;
};
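// Models matched here do not accept system messages; their instructions are folded into the
// latest user message further below instead.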
const noSystemModelRegex = [/\b(o1-preview|o1-mini|amazon\.titan-text)\b/gi];
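// Factory for a message token counter backed by the client's encoding, used for context accounting.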
function createTokenCounter(encoding) {
return function (message) {
const countTokens = (text) => Tokenizer.getTokenCount(text, encoding);
@@ -443,7 +453,7 @@ class AgentClient extends BaseClient {
);
return;
}
const appConfig = this.options.req.config;
const appConfig = await getAppConfig({ role: user.role });
const memoryConfig = appConfig.memory;
if (!memoryConfig || memoryConfig.disabled === true) {
return;
@@ -574,7 +584,7 @@ class AgentClient extends BaseClient {
if (this.processMemory == null) {
return;
}
const appConfig = this.options.req.config;
const appConfig = await getAppConfig({ role: this.options.req.user?.role });
const memoryConfig = appConfig.memory;
const messageWindowSize = memoryConfig?.messageWindowSize ?? 5;

@@ -607,7 +617,6 @@ class AgentClient extends BaseClient {
await this.chatCompletion({
payload,
onProgress: opts.onProgress,
userMCPAuthMap: opts.userMCPAuthMap,
abortController: opts.abortController,
});
return this.contentParts;
@@ -747,13 +756,7 @@ class AgentClient extends BaseClient {
return currentMessageTokens > 0 ? currentMessageTokens : originalEstimate;
}

/**
* @param {object} params
* @param {string | ChatCompletionMessageParam[]} params.payload
* @param {Record<string, Record<string, string>>} [params.userMCPAuthMap]
* @param {AbortController} [params.abortController]
*/
async chatCompletion({ payload, userMCPAuthMap, abortController = null }) {
async chatCompletion({ payload, abortController = null }) {
/** @type {Partial<GraphRunnableConfig>} */
let config;
/** @type {ReturnType<createRun>} */
@@ -765,7 +768,7 @@ class AgentClient extends BaseClient {
abortController = new AbortController();
}

const appConfig = this.options.req.config;
const appConfig = await getAppConfig({ role: this.options.req.user?.role });
/** @type {AppConfig['endpoints']['agents']} */
const agentsEConfig = appConfig.endpoints?.[EModelEndpoint.agents];

@@ -796,81 +799,140 @@ class AgentClient extends BaseClient {
);

/**
*
* @param {Agent} agent
* @param {BaseMessage[]} messages
* @param {number} [i]
* @param {TMessageContentParts[]} [contentData]
* @param {Record<string, number>} [currentIndexCountMap]
*/
const runAgents = async (messages) => {
const agents = [this.options.agent];
if (
this.agentConfigs &&
this.agentConfigs.size > 0 &&
((this.options.agent.edges?.length ?? 0) > 0 ||
(await checkCapability(this.options.req, AgentCapabilities.chain)))
) {
agents.push(...this.agentConfigs.values());
const runAgent = async (agent, _messages, i = 0, contentData = [], _currentIndexCountMap) => {
config.configurable.model = agent.model_parameters.model;
const currentIndexCountMap = _currentIndexCountMap ?? indexTokenCountMap;
if (i > 0) {
this.model = agent.model_parameters.model;
}

if (agents[0].recursion_limit && typeof agents[0].recursion_limit === 'number') {
config.recursionLimit = agents[0].recursion_limit;
if (i > 0 && config.signal == null) {
config.signal = abortController.signal;
}
if (agent.recursion_limit && typeof agent.recursion_limit === 'number') {
config.recursionLimit = agent.recursion_limit;
}

if (
agentsEConfig?.maxRecursionLimit &&
config.recursionLimit > agentsEConfig?.maxRecursionLimit
) {
config.recursionLimit = agentsEConfig?.maxRecursionLimit;
}
config.configurable.agent_id = agent.id;
config.configurable.name = agent.name;
config.configurable.agent_index = i;
const noSystemMessages = noSystemModelRegex.some((regex) =>
agent.model_parameters.model.match(regex),
);

// TODO: needs to be added as part of AgentContext initialization
// const noSystemModelRegex = [/\b(o1-preview|o1-mini|amazon\.titan-text)\b/gi];
// const noSystemMessages = noSystemModelRegex.some((regex) =>
// agent.model_parameters.model.match(regex),
// );
// if (noSystemMessages === true && systemContent?.length) {
// const latestMessageContent = _messages.pop().content;
// if (typeof latestMessageContent !== 'string') {
// latestMessageContent[0].text = [systemContent, latestMessageContent[0].text].join('\n');
// _messages.push(new HumanMessage({ content: latestMessageContent }));
// } else {
// const text = [systemContent, latestMessageContent].join('\n');
// _messages.push(new HumanMessage(text));
// }
// }
// let messages = _messages;
// if (agent.useLegacyContent === true) {
// messages = formatContentStrings(messages);
// }
// if (
// agent.model_parameters?.clientOptions?.defaultHeaders?.['anthropic-beta']?.includes(
// 'prompt-caching',
// )
// ) {
// messages = addCacheControl(messages);
// }
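// Build the system prompt from tool context plus the agent's instructions;
// additional_instructions are only appended for chained (non-primary) agents.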
const systemMessage = Object.values(agent.toolContextMap ?? {})
.join('\n')
.trim();

memoryPromise = this.runMemory(messages);
let systemContent = [
systemMessage,
agent.instructions ?? '',
i !== 0 ? (agent.additional_instructions ?? '') : '',
]
.join('\n')
.trim();

if (noSystemMessages === true) {
agent.instructions = undefined;
agent.additional_instructions = undefined;
} else {
agent.instructions = systemContent;
agent.additional_instructions = undefined;
}

if (noSystemMessages === true && systemContent?.length) {
const latestMessageContent = _messages.pop().content;
if (typeof latestMessageContent !== 'string') {
latestMessageContent[0].text = [systemContent, latestMessageContent[0].text].join('\n');
_messages.push(new HumanMessage({ content: latestMessageContent }));
} else {
const text = [systemContent, latestMessageContent].join('\n');
_messages.push(new HumanMessage(text));
}
}

let messages = _messages;
if (agent.useLegacyContent === true) {
messages = formatContentStrings(messages);
}
if (
agent.model_parameters?.clientOptions?.defaultHeaders?.['anthropic-beta']?.includes(
'prompt-caching',
)
) {
messages = addCacheControl(messages);
}

if (i === 0) {
memoryPromise = this.runMemory(messages);
}

run = await createRun({
agents,
indexTokenCountMap,
agent,
req: this.options.req,
runId: this.responseMessageId,
signal: abortController.signal,
customHandlers: this.options.eventHandlers,
requestBody: config.configurable.requestBody,
tokenCounter: createTokenCounter(this.getEncoding()),
});

if (!run) {
throw new Error('Failed to create run');
}

this.run = run;
if (userMCPAuthMap != null) {
config.configurable.userMCPAuthMap = userMCPAuthMap;
if (i === 0) {
this.run = run;
}

if (contentData.length) {
const agentUpdate = {
type: ContentTypes.AGENT_UPDATE,
[ContentTypes.AGENT_UPDATE]: {
index: contentData.length,
runId: this.responseMessageId,
agentId: agent.id,
},
};
const streamData = {
event: GraphEvents.ON_AGENT_UPDATE,
data: agentUpdate,
};
this.options.aggregateContent(streamData);
sendEvent(this.options.res, streamData);
contentData.push(agentUpdate);
run.Graph.contentData = contentData;
}

try {
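// Resolve per-user MCP credentials for this agent's tools when the app config declares customUserVars.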
if (agent.tools?.length && hasCustomUserVars(appConfig)) {
config.configurable.userMCPAuthMap = await getUserMCPAuthMap({
userId: this.options.req.user.id,
tools: agent.tools,
findPluginAuthsByKeys,
});
}
} catch (err) {
logger.error(
`[api/server/controllers/agents/client.js #chatCompletion] Error getting custom user vars for agent ${agent.id}`,
err,
);
}

/** @deprecated Agent Chain */
config.configurable.last_agent_id = agents[agents.length - 1].id;
await run.processStream({ messages }, config, {
keepContent: i !== 0,
tokenCounter: createTokenCounter(this.getEncoding()),
indexTokenCountMap: currentIndexCountMap,
maxContextTokens: agent.maxContextTokens,
callbacks: {
[Callback.TOOL_ERROR]: logToolError,
},
@@ -879,30 +941,116 @@ class AgentClient extends BaseClient {
config.signal = null;
};

await runAgents(initialMessages);
/** @deprecated Agent Chain */
if (config.configurable.hide_sequential_outputs) {
this.contentParts = this.contentParts.filter((part, index) => {
// Include parts that are either:
// 1. At or after the finalContentStart index
// 2. Of type tool_call
// 3. Have tool_call_ids property
return (
index >= this.contentParts.length - 1 ||
part.type === ContentTypes.TOOL_CALL ||
part.tool_call_ids
);
});
await runAgent(this.options.agent, initialMessages);
let finalContentStart = 0;
if (
this.agentConfigs &&
this.agentConfigs.size > 0 &&
(await checkCapability(this.options.req, AgentCapabilities.chain))
) {
const windowSize = 5;
let latestMessage = initialMessages.pop().content;
if (typeof latestMessage !== 'string') {
latestMessage = latestMessage[0].text;
}
let i = 1;
let runMessages = [];
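// Map token counts for the trailing message window so context pruning for chained agents stays accurate.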
const windowIndexCountMap = {};
const windowMessages = initialMessages.slice(-windowSize);
let currentIndex = 4;
for (let i = initialMessages.length - 1; i >= 0; i--) {
windowIndexCountMap[currentIndex] = indexTokenCountMap[i];
currentIndex--;
if (currentIndex < 0) {
break;
}
}
const encoding = this.getEncoding();
const tokenCounter = createTokenCounter(encoding);
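// Agent chain: run each additional configured agent in sequence, feeding it the recent message
// window plus a buffer string of the accumulated run output.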
for (const [agentId, agent] of this.agentConfigs) {
if (abortController.signal.aborted === true) {
break;
}
const currentRun = await run;

if (
i === this.agentConfigs.size &&
config.configurable.hide_sequential_outputs === true
) {
const content = this.contentParts.filter(
(part) => part.type === ContentTypes.TOOL_CALL,
);

this.options.res.write(
`event: message\ndata: ${JSON.stringify({
event: 'on_content_update',
data: {
runId: this.responseMessageId,
content,
},
})}\n\n`,
);
}
const _runMessages = currentRun.Graph.getRunMessages();
finalContentStart = this.contentParts.length;
runMessages = runMessages.concat(_runMessages);
const contentData = currentRun.Graph.contentData.slice();
const bufferString = getBufferString([new HumanMessage(latestMessage), ...runMessages]);
if (i === this.agentConfigs.size) {
logger.debug(`SEQUENTIAL AGENTS: Last buffer string:\n${bufferString}`);
}
try {
const contextMessages = [];
const runIndexCountMap = {};
for (let i = 0; i < windowMessages.length; i++) {
const message = windowMessages[i];
const messageType = message._getType();
if (
(!agent.tools || agent.tools.length === 0) &&
(messageType === 'tool' || (message.tool_calls?.length ?? 0) > 0)
) {
continue;
}
runIndexCountMap[contextMessages.length] = windowIndexCountMap[i];
contextMessages.push(message);
}
const bufferMessage = new HumanMessage(bufferString);
runIndexCountMap[contextMessages.length] = tokenCounter(bufferMessage);
const currentMessages = [...contextMessages, bufferMessage];
await runAgent(agent, currentMessages, i, contentData, runIndexCountMap);
} catch (err) {
logger.error(
`[api/server/controllers/agents/client.js #chatCompletion] Error running agent ${agentId} (${i})`,
err,
);
}
i++;
}
}

/** Note: not implemented */
if (config.configurable.hide_sequential_outputs !== true) {
finalContentStart = 0;
}

this.contentParts = this.contentParts.filter((part, index) => {
// Include parts that are either:
// 1. At or after the finalContentStart index
// 2. Of type tool_call
// 3. Have tool_call_ids property
return (
index >= finalContentStart || part.type === ContentTypes.TOOL_CALL || part.tool_call_ids
);
});

try {
const attachments = await this.awaitMemoryWithTimeout(memoryPromise);
if (attachments && attachments.length > 0) {
this.artifactPromises.push(...attachments);
}

const balanceConfig = getBalanceConfig(appConfig);
await this.recordCollectedUsage({ context: 'message', balance: balanceConfig });
await this.recordCollectedUsage({ context: 'message', balance: appConfig?.balance });
} catch (err) {
logger.error(
'[api/server/controllers/agents/client.js #chatCompletion] Error recording collected usage',
@@ -943,7 +1091,7 @@ class AgentClient extends BaseClient {
}
const { handleLLMEnd, collected: collectedMetadata } = createMetadataAggregator();
const { req, res, agent } = this.options;
const appConfig = req.config;
const appConfig = await getAppConfig({ role: req.user?.role });
let endpoint = agent.endpoint;

/** @type {import('@librechat/agents').ClientOptions} */
@@ -1043,20 +1191,6 @@ class AgentClient extends BaseClient {
clientOptions.json = true;
}

/** Resolve request-based headers for Custom Endpoints. Note: if this is added to
* non-custom endpoints, needs consideration of varying provider header configs.
*/
if (clientOptions?.configuration?.defaultHeaders != null) {
clientOptions.configuration.defaultHeaders = resolveHeaders({
headers: clientOptions.configuration.defaultHeaders,
body: {
messageId: this.responseMessageId,
conversationId: this.conversationId,
parentMessageId: this.parentMessageId,
},
});
}

try {
const titleResult = await this.run.generateTitle({
provider,
@@ -1095,12 +1229,11 @@ class AgentClient extends BaseClient {
};
});

const balanceConfig = getBalanceConfig(appConfig);
await this.recordCollectedUsage({
collectedUsage,
context: 'title',
model: clientOptions.model,
balance: balanceConfig,
balance: appConfig?.balance,
}).catch((err) => {
logger.error(
'[api/server/controllers/agents/client.js #titleConvo] Error recording collected usage',

@@ -1,5 +1,6 @@
const { Providers } = require('@librechat/agents');
const { Constants, EModelEndpoint } = require('librechat-data-provider');
const { getAppConfig } = require('~/server/services/Config');
const AgentClient = require('./client');

jest.mock('@librechat/agents', () => ({
@@ -10,6 +11,10 @@ jest.mock('@librechat/agents', () => ({
}),
}));

jest.mock('~/server/services/Config', () => ({
getAppConfig: jest.fn(),
}));

describe('AgentClient - titleConvo', () => {
let client;
let mockRun;
@@ -39,6 +44,19 @@ describe('AgentClient - titleConvo', () => {
},
};

// Mock getAppConfig to return endpoint configurations
getAppConfig.mockResolvedValue({
endpoints: {
[EModelEndpoint.openAI]: {
// Match the agent endpoint
titleModel: 'gpt-3.5-turbo',
titlePrompt: 'Custom title prompt',
titleMethod: 'structured',
titlePromptTemplate: 'Template: {{content}}',
},
},
});

// Mock request and response
mockReq = {
user: {
@@ -49,17 +67,6 @@ describe('AgentClient - titleConvo', () => {
endpoint: EModelEndpoint.openAI,
key: null,
},
config: {
endpoints: {
[EModelEndpoint.openAI]: {
// Match the agent endpoint
titleModel: 'gpt-3.5-turbo',
titlePrompt: 'Custom title prompt',
titleMethod: 'structured',
titlePromptTemplate: 'Template: {{content}}',
},
},
},
};

mockRes = {};
@@ -143,7 +150,7 @@ describe('AgentClient - titleConvo', () => {

it('should handle missing endpoint config gracefully', async () => {
// Remove endpoint config
mockReq.config = { endpoints: {} };
getAppConfig.mockResolvedValue({ endpoints: {} });

const text = 'Test conversation text';
const abortController = new AbortController();
@@ -161,7 +168,7 @@ describe('AgentClient - titleConvo', () => {

it('should use agent model when titleModel is not provided', async () => {
// Remove titleModel from config
mockReq.config = {
getAppConfig.mockResolvedValue({
endpoints: {
[EModelEndpoint.openAI]: {
titlePrompt: 'Custom title prompt',
@@ -170,7 +177,7 @@ describe('AgentClient - titleConvo', () => {
// titleModel is omitted
},
},
};
});

const text = 'Test conversation text';
const abortController = new AbortController();
@@ -182,7 +189,7 @@ describe('AgentClient - titleConvo', () => {
});

it('should not use titleModel when it equals CURRENT_MODEL constant', async () => {
mockReq.config = {
getAppConfig.mockResolvedValue({
endpoints: {
[EModelEndpoint.openAI]: {
titleModel: Constants.CURRENT_MODEL,
@@ -191,7 +198,7 @@ describe('AgentClient - titleConvo', () => {
titlePromptTemplate: 'Template: {{content}}',
},
},
};
});

const text = 'Test conversation text';
const abortController = new AbortController();
@@ -234,9 +241,6 @@ describe('AgentClient - titleConvo', () => {
model: 'gpt-3.5-turbo',
context: 'title',
collectedUsage: expect.any(Array),
balance: {
enabled: false,
},
});
});

@@ -266,7 +270,7 @@ describe('AgentClient - titleConvo', () => {
process.env.ANTHROPIC_API_KEY = 'test-api-key';

// Add titleEndpoint to the config
mockReq.config = {
getAppConfig.mockResolvedValue({
endpoints: {
[EModelEndpoint.openAI]: {
titleModel: 'gpt-3.5-turbo',
@@ -276,7 +280,7 @@ describe('AgentClient - titleConvo', () => {
titlePromptTemplate: 'Custom template',
},
},
};
});

const text = 'Test conversation text';
const abortController = new AbortController();
@@ -303,7 +307,7 @@ describe('AgentClient - titleConvo', () => {

it('should use all config when endpoint config is missing', async () => {
// Set 'all' config without endpoint-specific config
mockReq.config = {
getAppConfig.mockResolvedValue({
endpoints: {
all: {
titleModel: 'gpt-4o-mini',
@@ -312,7 +316,7 @@ describe('AgentClient - titleConvo', () => {
titlePromptTemplate: 'All config template: {{content}}',
},
},
};
});

const text = 'Test conversation text';
const abortController = new AbortController();
@@ -335,7 +339,7 @@ describe('AgentClient - titleConvo', () => {

it('should prioritize all config over endpoint config for title settings', async () => {
// Set both endpoint and 'all' config
mockReq.config = {
getAppConfig.mockResolvedValue({
endpoints: {
[EModelEndpoint.openAI]: {
titleModel: 'gpt-3.5-turbo',
@@ -350,7 +354,7 @@ describe('AgentClient - titleConvo', () => {
titlePromptTemplate: 'All config template',
},
},
};
});

const text = 'Test conversation text';
const abortController = new AbortController();
@@ -377,7 +381,7 @@ describe('AgentClient - titleConvo', () => {
process.env.ANTHROPIC_API_KEY = 'test-anthropic-key';

// Set comprehensive 'all' config with all new title options
mockReq.config = {
getAppConfig.mockResolvedValue({
endpoints: {
all: {
titleConvo: true,
@@ -388,7 +392,7 @@ describe('AgentClient - titleConvo', () => {
titleEndpoint: EModelEndpoint.anthropic, // Should switch provider to Anthropic
},
},
};
});

const text = 'Test conversation about AI and machine learning';
const abortController = new AbortController();
@@ -434,7 +438,7 @@ describe('AgentClient - titleConvo', () => {
mockRun.generateTitle.mockClear();

// Set 'all' config with specific titleMethod
mockReq.config = {
getAppConfig.mockResolvedValue({
endpoints: {
all: {
titleModel: 'gpt-4o-mini',
@@ -443,7 +447,7 @@ describe('AgentClient - titleConvo', () => {
titlePromptTemplate: `Template for ${method}: {{content}}`,
},
},
};
});

const text = `Test conversation for ${method} method`;
const abortController = new AbortController();
@@ -487,7 +491,7 @@ describe('AgentClient - titleConvo', () => {
// Set up Azure endpoint with serverless config
mockAgent.endpoint = EModelEndpoint.azureOpenAI;
mockAgent.provider = EModelEndpoint.azureOpenAI;
mockReq.config = {
getAppConfig.mockResolvedValue({
endpoints: {
[EModelEndpoint.azureOpenAI]: {
titleConvo: true,
@@ -516,7 +520,7 @@ describe('AgentClient - titleConvo', () => {
},
},
},
};
});
mockReq.body.endpoint = EModelEndpoint.azureOpenAI;
mockReq.body.model = 'grok-3';

@@ -539,7 +543,7 @@ describe('AgentClient - titleConvo', () => {
// Set up Azure endpoint
mockAgent.endpoint = EModelEndpoint.azureOpenAI;
mockAgent.provider = EModelEndpoint.azureOpenAI;
mockReq.config = {
getAppConfig.mockResolvedValue({
endpoints: {
[EModelEndpoint.azureOpenAI]: {
titleConvo: true,
@@ -567,7 +571,7 @@ describe('AgentClient - titleConvo', () => {
},
},
},
};
});
mockReq.body.endpoint = EModelEndpoint.azureOpenAI;
mockReq.body.model = 'gpt-4o';

@@ -591,7 +595,7 @@ describe('AgentClient - titleConvo', () => {
mockAgent.endpoint = EModelEndpoint.azureOpenAI;
mockAgent.provider = EModelEndpoint.azureOpenAI;
mockAgent.model_parameters.model = 'gpt-4o-latest';
mockReq.config = {
getAppConfig.mockResolvedValue({
endpoints: {
[EModelEndpoint.azureOpenAI]: {
titleConvo: true,
@@ -620,7 +624,7 @@ describe('AgentClient - titleConvo', () => {
},
},
},
};
});
mockReq.body.endpoint = EModelEndpoint.azureOpenAI;
mockReq.body.model = 'gpt-4o-latest';
|
||||
|
||||
@@ -642,7 +646,7 @@ describe('AgentClient - titleConvo', () => {
|
||||
// Set up Azure endpoint
|
||||
mockAgent.endpoint = EModelEndpoint.azureOpenAI;
|
||||
mockAgent.provider = EModelEndpoint.azureOpenAI;
|
||||
mockReq.config = {
|
||||
getAppConfig.mockResolvedValue({
|
||||
endpoints: {
|
||||
[EModelEndpoint.azureOpenAI]: {
|
||||
titleConvo: true,
|
||||
@@ -698,7 +702,7 @@ describe('AgentClient - titleConvo', () => {
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
});
|
||||
mockReq.body.endpoint = EModelEndpoint.azureOpenAI;
|
||||
mockReq.body.model = 'o1-mini';
|
||||
|
||||
@@ -728,7 +732,7 @@ describe('AgentClient - titleConvo', () => {
|
||||
mockReq.body.model = 'gpt-4';
|
||||
|
||||
// Set 'all' config as fallback with a serverless Azure config
|
||||
mockReq.config = {
|
||||
getAppConfig.mockResolvedValue({
|
||||
endpoints: {
|
||||
all: {
|
||||
titleConvo: true,
|
||||
@@ -757,7 +761,7 @@ describe('AgentClient - titleConvo', () => {
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
const text = 'Test Azure with all config fallback';
|
||||
const abortController = new AbortController();
|
||||
@@ -1040,11 +1044,11 @@ describe('AgentClient - titleConvo', () => {
|
||||
};
|
||||
|
||||
// Mock getAppConfig for memory tests
|
||||
mockReq.config = {
|
||||
getAppConfig.mockResolvedValue({
|
||||
memory: {
|
||||
messageWindowSize: 3,
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
mockRes = {};
|
||||
|
||||
|
||||
@@ -21,7 +21,7 @@ const getLogStores = require('~/cache/getLogStores');

/**
* @typedef {Object} ErrorHandlerDependencies
* @property {ServerRequest} req - The Express request object
* @property {Express.Request} req - The Express request object
* @property {Express.Response} res - The Express response object
* @property {() => ErrorHandlerContext} getContext - Function to get the current context
* @property {string} [originPath] - The origin path for the error handler

@@ -9,24 +9,6 @@ const {
const { disposeClient, clientRegistry, requestDataMap } = require('~/server/cleanup');
const { saveMessage } = require('~/models');

function createCloseHandler(abortController) {
return function (manual) {
if (!manual) {
logger.debug('[AgentController] Request closed');
}
if (!abortController) {
return;
} else if (abortController.signal.aborted) {
return;
} else if (abortController.requestCompleted) {
return;
}

abortController.abort();
logger.debug('[AgentController] Request aborted on close');
};
}

const AgentController = async (req, res, next, initializeClient, addTitle) => {
let {
text,
@@ -49,6 +31,7 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
let userMessagePromise;
let getAbortData;
let client = null;
// Initialize as an array
let cleanupHandlers = [];

const newConvo = !conversationId;
@@ -79,7 +62,9 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
// Create a function to handle final cleanup
const performCleanup = () => {
logger.debug('[AgentController] Performing cleanup');
// Make sure cleanupHandlers is an array before iterating
if (Array.isArray(cleanupHandlers)) {
// Execute all cleanup handlers
for (const handler of cleanupHandlers) {
try {
if (typeof handler === 'function') {
@@ -120,33 +105,8 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
};

try {
let prelimAbortController = new AbortController();
const prelimCloseHandler = createCloseHandler(prelimAbortController);
res.on('close', prelimCloseHandler);
const removePrelimHandler = (manual) => {
try {
prelimCloseHandler(manual);
res.removeListener('close', prelimCloseHandler);
} catch (e) {
logger.error('[AgentController] Error removing close listener', e);
}
};
cleanupHandlers.push(removePrelimHandler);
/** @type {{ client: TAgentClient; userMCPAuthMap?: Record<string, Record<string, string>> }} */
const result = await initializeClient({
req,
res,
endpointOption,
signal: prelimAbortController.signal,
});
if (prelimAbortController.signal?.aborted) {
prelimAbortController = null;
throw new Error('Request was aborted before initialization could complete');
} else {
prelimAbortController = null;
removePrelimHandler(true);
cleanupHandlers.pop();
}
/** @type {{ client: TAgentClient }} */
const result = await initializeClient({ req, res, endpointOption });
client = result.client;

// Register client with finalization registry if available
@@ -178,7 +138,22 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
};

const { abortController, onStart } = createAbortController(req, res, getAbortData, getReqData);
const closeHandler = createCloseHandler(abortController);

// Simple handler to avoid capturing scope
const closeHandler = () => {
logger.debug('[AgentController] Request closed');
if (!abortController) {
return;
} else if (abortController.signal.aborted) {
return;
} else if (abortController.requestCompleted) {
return;
}

abortController.abort();
logger.debug('[AgentController] Request aborted on close');
};

res.on('close', closeHandler);
cleanupHandlers.push(() => {
try {
@@ -200,7 +175,6 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
abortController,
overrideParentMessageId,
isEdited: !!editedContent,
userMCPAuthMap: result.userMCPAuthMap,
responseMessageId: editedResponseMessageId,
progressOptions: {
res,
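One side of the two hunks above defines and uses a createCloseHandler(abortController) factory, while the other inlines the same checks "to avoid capturing scope". A minimal, self-contained sketch of the factory variant, with console.debug standing in for the project's logger and a bare EventEmitter standing in for the Express response (both are assumptions for illustration):

const { EventEmitter } = require('events');

// Factory: builds a 'close' listener bound to exactly one AbortController.
function createCloseHandler(abortController) {
  return function (manual) {
    if (!manual) {
      console.debug('[sketch] request closed');
    }
    // Nothing to abort if the controller is gone, already aborted, or the request completed.
    if (!abortController || abortController.signal.aborted || abortController.requestCompleted) {
      return;
    }
    abortController.abort();
    console.debug('[sketch] request aborted on close');
  };
}

// Usage: attach during initialization, then detach once setup succeeds.
const res = new EventEmitter();
const prelimAbortController = new AbortController();
const prelimCloseHandler = createCloseHandler(prelimAbortController);
res.on('close', prelimCloseHandler);

// ...after initialization resolves, the manual call skips the 'closed' log:
prelimCloseHandler(true);
res.removeListener('close', prelimCloseHandler);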
@@ -5,7 +5,6 @@ const { logger } = require('@librechat/data-schemas');
|
||||
const { agentCreateSchema, agentUpdateSchema } = require('@librechat/api');
|
||||
const {
|
||||
Tools,
|
||||
Constants,
|
||||
SystemRoles,
|
||||
FileSources,
|
||||
ResourceType,
|
||||
@@ -32,12 +31,12 @@ const {
|
||||
grantPermission,
|
||||
} = require('~/server/services/PermissionService');
|
||||
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
|
||||
const { getCachedTools, getAppConfig } = require('~/server/services/Config');
|
||||
const { resizeAvatar } = require('~/server/services/Files/images/avatar');
|
||||
const { getFileStrategy } = require('~/server/utils/getFileStrategy');
|
||||
const { refreshS3Url } = require('~/server/services/Files/S3/crud');
|
||||
const { filterFile } = require('~/server/services/Files/process');
|
||||
const { updateAction, getActions } = require('~/models/Action');
|
||||
const { getCachedTools } = require('~/server/services/Config');
|
||||
const { deleteFileByFilter } = require('~/models/File');
|
||||
const { getCategoriesWithCounts } = require('~/models');
|
||||
|
||||
@@ -70,9 +69,9 @@ const createAgentHandler = async (req, res) => {
|
||||
for (const tool of tools) {
|
||||
if (availableTools[tool]) {
|
||||
agentData.tools.push(tool);
|
||||
} else if (systemTools[tool]) {
|
||||
agentData.tools.push(tool);
|
||||
} else if (tool.includes(Constants.mcp_delimiter)) {
|
||||
}
|
||||
|
||||
if (systemTools[tool]) {
|
||||
agentData.tools.push(tool);
|
||||
}
|
||||
}
|
||||
@@ -488,7 +487,7 @@ const getListAgentsHandler = async (req, res) => {
|
||||
*/
|
||||
const uploadAgentAvatarHandler = async (req, res) => {
|
||||
try {
|
||||
const appConfig = req.config;
|
||||
const appConfig = await getAppConfig({ role: req.user?.role });
|
||||
filterFile({ req, file: req.file, image: true, isAvatar: true });
|
||||
const { agent_id } = req.params;
|
||||
if (!agent_id) {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
const { v4 } = require('uuid');
|
||||
const { sleep } = require('@librechat/agents');
|
||||
const { sendEvent } = require('@librechat/api');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { sendEvent, getBalanceConfig } = require('@librechat/api');
|
||||
const {
|
||||
Time,
|
||||
Constants,
|
||||
@@ -29,6 +29,7 @@ const { createRun, StreamRunManager } = require('~/server/services/Runs');
|
||||
const { addTitle } = require('~/server/services/Endpoints/assistants');
|
||||
const { createRunBody } = require('~/server/services/createRunBody');
|
||||
const { sendResponse } = require('~/server/middleware/error');
|
||||
const { getAppConfig } = require('~/server/services/Config');
|
||||
const { getTransactions } = require('~/models/Transaction');
|
||||
const { checkBalance } = require('~/models/balanceMethods');
|
||||
const { getConvo } = require('~/models/Conversation');
|
||||
@@ -47,7 +48,7 @@ const { getOpenAIClient } = require('./helpers');
|
||||
* @returns {void}
|
||||
*/
|
||||
const chatV1 = async (req, res) => {
|
||||
const appConfig = req.config;
|
||||
const appConfig = await getAppConfig({ role: req.user?.role });
|
||||
logger.debug('[/assistants/chat/] req.body', req.body);
|
||||
|
||||
const {
|
||||
@@ -252,8 +253,8 @@ const chatV1 = async (req, res) => {
|
||||
}
|
||||
|
||||
const checkBalanceBeforeRun = async () => {
|
||||
const balanceConfig = getBalanceConfig(appConfig);
|
||||
if (!balanceConfig?.enabled) {
|
||||
const balance = appConfig?.balance;
|
||||
if (!balance?.enabled) {
|
||||
return;
|
||||
}
|
||||
const transactions =
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
const { v4 } = require('uuid');
|
||||
const { sleep } = require('@librechat/agents');
|
||||
const { sendEvent } = require('@librechat/api');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { sendEvent, getBalanceConfig } = require('@librechat/api');
|
||||
const {
|
||||
Time,
|
||||
Constants,
|
||||
@@ -26,6 +26,7 @@ const validateAuthor = require('~/server/middleware/assistants/validateAuthor');
|
||||
const { createRun, StreamRunManager } = require('~/server/services/Runs');
|
||||
const { addTitle } = require('~/server/services/Endpoints/assistants');
|
||||
const { createRunBody } = require('~/server/services/createRunBody');
|
||||
const { getAppConfig } = require('~/server/services/Config');
|
||||
const { getTransactions } = require('~/models/Transaction');
|
||||
const { checkBalance } = require('~/models/balanceMethods');
|
||||
const { getConvo } = require('~/models/Conversation');
|
||||
@@ -38,13 +39,13 @@ const { getOpenAIClient } = require('./helpers');
|
||||
* @route POST /
|
||||
* @desc Chat with an assistant
|
||||
* @access Public
|
||||
* @param {ServerRequest} req - The request object, containing the request data.
|
||||
* @param {Express.Request} req - The request object, containing the request data.
|
||||
* @param {Express.Response} res - The response object, used to send back a response.
|
||||
* @returns {void}
|
||||
*/
|
||||
const chatV2 = async (req, res) => {
|
||||
logger.debug('[/assistants/chat/] req.body', req.body);
|
||||
const appConfig = req.config;
|
||||
const appConfig = await getAppConfig({ role: req.user?.role });
|
||||
|
||||
/** @type {{files: MongoFile[]}} */
|
||||
const {
|
||||
@@ -127,8 +128,8 @@ const chatV2 = async (req, res) => {
|
||||
}
|
||||
|
||||
const checkBalanceBeforeRun = async () => {
|
||||
const balanceConfig = getBalanceConfig(appConfig);
|
||||
if (!balanceConfig?.enabled) {
|
||||
const balance = appConfig?.balance;
|
||||
if (!balance?.enabled) {
|
||||
return;
|
||||
}
|
||||
const transactions =
|
||||
|
||||
@@ -22,7 +22,7 @@ const getLogStores = require('~/cache/getLogStores');
|
||||
|
||||
/**
|
||||
* @typedef {Object} ErrorHandlerDependencies
|
||||
* @property {ServerRequest} req - The Express request object
|
||||
* @property {Express.Request} req - The Express request object
|
||||
* @property {Express.Response} res - The Express response object
|
||||
* @property {() => ErrorHandlerContext} getContext - Function to get the current context
|
||||
* @property {string} [originPath] - The origin path for the error handler
|
||||
|
||||
@@ -7,11 +7,11 @@ const {
|
||||
const {
|
||||
initializeClient: initAzureClient,
|
||||
} = require('~/server/services/Endpoints/azureAssistants');
|
||||
const { getEndpointsConfig, getAppConfig } = require('~/server/services/Config');
|
||||
const { initializeClient } = require('~/server/services/Endpoints/assistants');
|
||||
const { getEndpointsConfig } = require('~/server/services/Config');
|
||||
|
||||
/**
|
||||
* @param {ServerRequest} req
|
||||
* @param {Express.Request} req
|
||||
* @param {string} [endpoint]
|
||||
* @returns {Promise<string>}
|
||||
*/
|
||||
@@ -210,7 +210,7 @@ async function getOpenAIClient({ req, res, endpointOption, initAppClient, overri
|
||||
* @returns {Promise<AssistantListResponse>} 200 - success response - application/json
|
||||
*/
|
||||
const fetchAssistants = async ({ req, res, overrideEndpoint }) => {
|
||||
const appConfig = req.config;
|
||||
const appConfig = await getAppConfig({ role: req.user?.role });
|
||||
const {
|
||||
limit = 100,
|
||||
order = 'desc',
|
||||
|
||||
@@ -5,9 +5,9 @@ const { uploadImageBuffer, filterFile } = require('~/server/services/Files/proce
|
||||
const validateAuthor = require('~/server/middleware/assistants/validateAuthor');
|
||||
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
|
||||
const { deleteAssistantActions } = require('~/server/services/ActionService');
|
||||
const { getCachedTools, getAppConfig } = require('~/server/services/Config');
|
||||
const { updateAssistantDoc, getAssistants } = require('~/models/Assistant');
|
||||
const { getOpenAIClient, fetchAssistants } = require('./helpers');
|
||||
const { getCachedTools } = require('~/server/services/Config');
|
||||
const { manifestToolMap } = require('~/app/clients/tools');
|
||||
const { deleteFileByFilter } = require('~/models/File');
|
||||
|
||||
@@ -258,7 +258,7 @@ function filterAssistantDocs({ documents, userId, assistantsConfig = {} }) {
|
||||
*/
|
||||
const getAssistantDocuments = async (req, res) => {
|
||||
try {
|
||||
const appConfig = req.config;
|
||||
const appConfig = await getAppConfig({ role: req.user?.role });
|
||||
const endpoint = req.query;
|
||||
const assistantsConfig = appConfig.endpoints?.[endpoint];
|
||||
const documents = await getAssistants(
|
||||
@@ -297,7 +297,7 @@ const getAssistantDocuments = async (req, res) => {
|
||||
*/
|
||||
const uploadAssistantAvatar = async (req, res) => {
|
||||
try {
|
||||
const appConfig = req.config;
|
||||
const appConfig = await getAppConfig({ role: req.user?.role });
|
||||
filterFile({ req, file: req.file, image: true, isAvatar: true });
|
||||
const { assistant_id } = req.params;
|
||||
if (!assistant_id) {
|
||||
|
||||
@@ -94,7 +94,7 @@ const createAssistant = async (req, res) => {
|
||||
/**
|
||||
* Modifies an assistant.
|
||||
* @param {object} params
|
||||
* @param {ServerRequest} params.req
|
||||
* @param {Express.Request} params.req
|
||||
* @param {OpenAIClient} params.openai
|
||||
* @param {string} params.assistant_id
|
||||
* @param {AssistantUpdateParams} params.updateData
|
||||
@@ -199,7 +199,7 @@ const updateAssistant = async ({ req, openai, assistant_id, updateData }) => {
|
||||
/**
|
||||
* Modifies an assistant with the resource file id.
|
||||
* @param {object} params
|
||||
* @param {ServerRequest} params.req
|
||||
* @param {Express.Request} params.req
|
||||
* @param {OpenAIClient} params.openai
|
||||
* @param {string} params.assistant_id
|
||||
* @param {string} params.tool_resource
|
||||
@@ -227,7 +227,7 @@ const addResourceFileId = async ({ req, openai, assistant_id, tool_resource, fil
|
||||
/**
|
||||
* Deletes a file ID from an assistant's resource.
|
||||
* @param {object} params
|
||||
* @param {ServerRequest} params.req
|
||||
* @param {Express.Request} params.req
|
||||
* @param {OpenAIClient} params.openai
|
||||
* @param {string} params.assistant_id
|
||||
* @param {string} [params.tool_resource]
|
||||
|
||||
@@ -13,6 +13,7 @@ const { processFileURL, uploadImageBuffer } = require('~/server/services/Files/p
|
||||
const { processCodeOutput } = require('~/server/services/Files/Code/process');
|
||||
const { createToolCall, getToolCallsByConvo } = require('~/models/ToolCall');
|
||||
const { loadAuthValues } = require('~/server/services/Tools/credentials');
|
||||
const { getAppConfig } = require('~/server/services/Config');
|
||||
const { loadTools } = require('~/app/clients/tools/util');
|
||||
const { getRoleByName } = require('~/models/Role');
|
||||
const { getMessage } = require('~/models/Message');
|
||||
@@ -35,7 +36,7 @@ const toolAccessPermType = {
|
||||
*/
|
||||
const verifyWebSearchAuth = async (req, res) => {
|
||||
try {
|
||||
const appConfig = req.config;
|
||||
const appConfig = await getAppConfig({ role: req.user?.role });
|
||||
const userId = req.user.id;
|
||||
/** @type {TCustomConfig['webSearch']} */
|
||||
const webSearchConfig = appConfig?.webSearch || {};
|
||||
@@ -111,7 +112,7 @@ const verifyToolAuth = async (req, res) => {
|
||||
*/
|
||||
const callTool = async (req, res) => {
|
||||
try {
|
||||
const appConfig = req.config;
|
||||
const appConfig = await getAppConfig({ role: req.user?.role });
|
||||
const { toolId = '' } = req.params;
|
||||
if (!fieldsMap[toolId]) {
|
||||
logger.warn(`[${toolId}/call] User ${req.user.id} attempted call to invalid tool`);
|
||||
|
||||
@@ -52,20 +52,7 @@ const startServer = async () => {
const appConfig = await getAppConfig();
await updateInterfacePermissions(appConfig);
const indexPath = path.join(appConfig.paths.dist, 'index.html');
let indexHTML = fs.readFileSync(indexPath, 'utf8');

// In order to provide support to serving the application in a sub-directory
// We need to update the base href if the DOMAIN_CLIENT is specified and not the root path
if (process.env.DOMAIN_CLIENT) {
const clientUrl = new URL(process.env.DOMAIN_CLIENT);
const baseHref = clientUrl.pathname.endsWith('/')
? clientUrl.pathname
: `${clientUrl.pathname}/`;
if (baseHref !== '/') {
logger.info(`Setting base href to ${baseHref}`);
indexHTML = indexHTML.replace(/base href="\/"/, `base href="${baseHref}"`);
}
}
const indexHTML = fs.readFileSync(indexPath, 'utf8');

app.get('/health', (_req, res) => res.status(200).send('OK'));

@@ -148,8 +135,7 @@ const startServer = async () => {

const lang = req.cookies.lang || req.headers['accept-language']?.split(',')[0] || 'en-US';
const saneLang = lang.replace(/"/g, '&quot;');
let updatedIndexHtml = indexHTML.replace(/lang="en-US"/g, `lang="${saneLang}"`);

const updatedIndexHtml = indexHTML.replace(/lang="en-US"/g, `lang="${saneLang}"`);
res.type('html');
res.send(updatedIndexHtml);
});
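The block removed in this hunk rewrote the base href in index.html when DOMAIN_CLIENT points at a sub-path. A standalone sketch of that rewrite, using a placeholder URL and HTML snippet (both assumptions for illustration):

// Derive a base href from a client URL and patch it into an index.html string.
function applyBaseHref(indexHTML, domainClient) {
  if (!domainClient) {
    return indexHTML;
  }
  const clientUrl = new URL(domainClient);
  // Normalize the path so it always ends with a trailing slash.
  const baseHref = clientUrl.pathname.endsWith('/') ? clientUrl.pathname : `${clientUrl.pathname}/`;
  if (baseHref === '/') {
    return indexHTML;
  }
  return indexHTML.replace(/base href="\/"/, `base href="${baseHref}"`);
}

console.log(applyBaseHref('<head><base href="/"></head>', 'https://example.com/librechat'));
// -> <head><base href="/librechat/"></head>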
||||
|
||||
@@ -5,6 +5,7 @@ const mongoose = require('mongoose');
|
||||
|
||||
jest.mock('~/server/services/Config', () => ({
|
||||
loadCustomConfig: jest.fn(() => Promise.resolve({})),
|
||||
setAppConfig: jest.fn(),
|
||||
getAppConfig: jest.fn().mockResolvedValue({
|
||||
paths: {
|
||||
uploads: '/tmp',
|
||||
|
||||
@@ -1,6 +1,6 @@
const { logger } = require('@librechat/data-schemas');
const { countTokens, isEnabled, sendEvent } = require('@librechat/api');
const { isAssistantsEndpoint, ErrorTypes, Constants } = require('librechat-data-provider');
const { isAssistantsEndpoint, ErrorTypes } = require('librechat-data-provider');
const { truncateText, smartTruncateText } = require('~/app/clients/prompts');
const clearPendingReq = require('~/cache/clearPendingReq');
const { sendError } = require('~/server/middleware/error');
@@ -11,10 +11,6 @@ const { abortRun } = require('./abortRun');

const abortDataMap = new WeakMap();

/**
* @param {string} abortKey
* @returns {boolean}
*/
function cleanupAbortController(abortKey) {
if (!abortControllers.has(abortKey)) {
return false;
@@ -75,20 +71,6 @@ function cleanupAbortController(abortKey) {
return true;
}

/**
* @param {string} abortKey
* @returns {function(): void}
*/
function createCleanUpHandler(abortKey) {
return function () {
try {
cleanupAbortController(abortKey);
} catch {
// Ignore cleanup errors
}
};
}

async function abortMessage(req, res) {
let { abortKey, endpoint } = req.body;

@@ -190,15 +172,11 @@ const createAbortController = (req, res, getAbortData, getReqData) => {
/**
* @param {TMessage} userMessage
* @param {string} responseMessageId
* @param {boolean} [isNewConvo]
*/
const onStart = (userMessage, responseMessageId, isNewConvo) => {
const onStart = (userMessage, responseMessageId) => {
sendEvent(res, { message: userMessage, created: true });

const prelimAbortKey = userMessage?.conversationId ?? req.user.id;
const abortKey = isNewConvo
? `${prelimAbortKey}${Constants.COMMON_DIVIDER}${Constants.NEW_CONVO}`
: prelimAbortKey;
const abortKey = userMessage?.conversationId ?? req.user.id;
getReqData({ abortKey });
const prevRequest = abortControllers.get(abortKey);
const { overrideUserMessageId } = req?.body ?? {};
@@ -216,7 +194,16 @@ const createAbortController = (req, res, getAbortData, getReqData) => {
};

abortControllers.set(addedAbortKey, { abortController, ...minimalOptions });
const cleanupHandler = createCleanUpHandler(addedAbortKey);

// Use a simple function for cleanup to avoid capturing context
const cleanupHandler = () => {
try {
cleanupAbortController(addedAbortKey);
} catch (e) {
// Ignore cleanup errors
}
};

res.on('finish', cleanupHandler);
return;
}
@@ -229,7 +216,16 @@ const createAbortController = (req, res, getAbortData, getReqData) => {
};

abortControllers.set(abortKey, { abortController, ...minimalOptions });
const cleanupHandler = createCleanUpHandler(abortKey);

// Use a simple function for cleanup to avoid capturing context
const cleanupHandler = () => {
try {
cleanupAbortController(abortKey);
} catch (e) {
// Ignore cleanup errors
}
};

res.on('finish', cleanupHandler);
};

@@ -368,7 +364,15 @@ const handleAbortError = async (res, req, error, data) => {
};
}

const callback = createCleanUpHandler(conversationId);
// Create a simple callback without capturing parent scope
const callback = async () => {
try {
cleanupAbortController(conversationId);
} catch (e) {
// Ignore cleanup errors
}
};

await sendError(req, res, options, callback);
};
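Both cleanup paths above either call a createCleanUpHandler(abortKey) factory or inline the same try/catch around cleanupAbortController. A self-contained sketch of the factory variant; the Map-based registry and the simplified cleanupAbortController here are assumptions, since the real module tracks more state per key:

const abortControllers = new Map();

function cleanupAbortController(abortKey) {
  const entry = abortControllers.get(abortKey);
  if (!entry) {
    return false;
  }
  // Abort anything still in flight, then drop the entry from the registry.
  if (!entry.abortController.signal.aborted) {
    entry.abortController.abort();
  }
  abortControllers.delete(abortKey);
  return true;
}

function createCleanUpHandler(abortKey) {
  return function () {
    try {
      cleanupAbortController(abortKey);
    } catch {
      // Ignore cleanup errors
    }
  };
}

// Usage: register a controller, then run the handler on response 'finish'.
abortControllers.set('conversation-123', { abortController: new AbortController() });
const onFinish = createCleanUpHandler('conversation-123');
onFinish();
console.log(abortControllers.has('conversation-123')); // false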
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const { v4 } = require('uuid');
|
||||
const { handleAbortError } = require('~/server/middleware/abortMiddleware');
|
||||
const { getAppConfig } = require('~/server/services/Config/app');
|
||||
|
||||
/**
|
||||
* Checks if the assistant is supported or excluded
|
||||
@@ -12,7 +13,7 @@ const { handleAbortError } = require('~/server/middleware/abortMiddleware');
|
||||
const validateAssistant = async (req, res, next) => {
|
||||
const { endpoint, conversationId, assistant_id, messageId } = req.body;
|
||||
|
||||
const appConfig = req.config;
|
||||
const appConfig = await getAppConfig({ role: req.user?.role });
|
||||
/** @type {Partial<TAssistantEndpoint>} */
|
||||
const assistantsConfig = appConfig.endpoints?.[endpoint];
|
||||
if (!assistantsConfig) {
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
const { SystemRoles } = require('librechat-data-provider');
|
||||
const { getAppConfig } = require('~/server/services/Config/app');
|
||||
const { getAssistant } = require('~/models/Assistant');
|
||||
|
||||
/**
|
||||
@@ -20,7 +21,7 @@ const validateAuthor = async ({ req, openai, overrideEndpoint, overrideAssistant
|
||||
const assistant_id =
|
||||
overrideAssistantId ?? req.params.id ?? req.body.assistant_id ?? req.query.assistant_id;
|
||||
|
||||
const appConfig = req.config;
|
||||
const appConfig = await getAppConfig({ role: req.user?.role });
|
||||
/** @type {Partial<TAssistantEndpoint>} */
|
||||
const assistantsConfig = appConfig.endpoints?.[endpoint];
|
||||
if (!assistantsConfig) {
|
||||
|
||||
@@ -10,6 +10,7 @@ const azureAssistants = require('~/server/services/Endpoints/azureAssistants');
|
||||
const assistants = require('~/server/services/Endpoints/assistants');
|
||||
const { processFiles } = require('~/server/services/Files/process');
|
||||
const anthropic = require('~/server/services/Endpoints/anthropic');
|
||||
const { getAppConfig } = require('~/server/services/Config/app');
|
||||
const bedrock = require('~/server/services/Endpoints/bedrock');
|
||||
const openAI = require('~/server/services/Endpoints/openAI');
|
||||
const agents = require('~/server/services/Endpoints/agents');
|
||||
@@ -40,7 +41,7 @@ async function buildEndpointOption(req, res, next) {
return handleError(res, { text: 'Error parsing conversation' });
}

const appConfig = req.config;
const appConfig = await getAppConfig({ role: req.user?.role });
if (appConfig.modelSpecs?.list && appConfig.modelSpecs?.enforce) {
/** @type {{ list: TModelSpec[] }}*/
const { list } = appConfig.modelSpecs;

@@ -15,10 +15,10 @@ const { getAppConfig } = require('~/server/services/Config');
*/
const checkDomainAllowed = async (req, res, next = () => {}) => {
const email = req?.user?.email;
const appConfig = await getAppConfig({
const appConfig = getAppConfig({
role: req?.user?.role,
});
if (email && !isEmailDomainAllowed(email, appConfig?.registration?.allowedDomains)) {
if (email && !(await isEmailDomainAllowed(email, appConfig?.registration?.allowedDomains))) {
logger.error(`[Social Login] [Social Login not allowed] [Email: ${email}]`);
return res.redirect('/login');
} else {
||||
|
||||
@@ -1,27 +0,0 @@
const { logger } = require('@librechat/data-schemas');
const { getAppConfig } = require('~/server/services/Config');

const configMiddleware = async (req, res, next) => {
try {
const userRole = req.user?.role;
req.config = await getAppConfig({ role: userRole });

next();
} catch (error) {
logger.error('Config middleware error:', {
error: error.message,
userRole: req.user?.role,
path: req.path,
});

try {
req.config = await getAppConfig();
next();
} catch (fallbackError) {
logger.error('Fallback config middleware error:', fallbackError);
next(fallbackError);
}
}
};

module.exports = configMiddleware;
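The middleware deleted here attached the role-aware app config to req.config once per request; the controller hunks elsewhere in this compare call getAppConfig({ role }) at the point of use instead. A sketch of that direct-lookup style, with an in-memory getAppConfig stand-in (the cache shape and config fields are assumptions for illustration):

const cache = new Map();

// Stand-in for the Config service: one cached config object per role.
async function getAppConfig({ role } = {}) {
  const key = role ?? 'default';
  if (!cache.has(key)) {
    cache.set(key, { role: key, endpoints: {}, paths: { uploads: '/tmp' } });
  }
  return cache.get(key);
}

// A handler then resolves the config where it is needed:
async function exampleHandler(req, res) {
  const appConfig = await getAppConfig({ role: req.user?.role });
  res.json({ uploads: appConfig.paths.uploads });
}

module.exports = { getAppConfig, exampleHandler };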
||||
@@ -82,7 +82,7 @@ const sendError = async (req, res, options, callback) => {
|
||||
|
||||
/**
|
||||
* Sends the response based on whether headers have been sent or not.
|
||||
* @param {ServerRequest} req - The server response.
|
||||
* @param {Express.Request} req - The server response.
|
||||
* @param {Express.Response} res - The server response.
|
||||
* @param {Object} data - The data to be sent.
|
||||
* @param {string} [errorMessage] - The error message, if any.
|
||||
|
||||
@@ -13,7 +13,6 @@ const requireLdapAuth = require('./requireLdapAuth');
|
||||
const abortMiddleware = require('./abortMiddleware');
|
||||
const checkInviteUser = require('./checkInviteUser');
|
||||
const requireJwtAuth = require('./requireJwtAuth');
|
||||
const configMiddleware = require('./config/app');
|
||||
const validateModel = require('./validateModel');
|
||||
const moderateText = require('./moderateText');
|
||||
const logHeaders = require('./logHeaders');
|
||||
@@ -44,7 +43,6 @@ module.exports = {
|
||||
requireLocalAuth,
|
||||
canDeleteAccount,
|
||||
validateEndpoint,
|
||||
configMiddleware,
|
||||
concurrentLimiter,
|
||||
checkDomainAllowed,
|
||||
validateMessageReq,
|
||||
|
||||
@@ -15,7 +15,7 @@ const { USE_REDIS, CONVO_ACCESS_VIOLATION_SCORE: score = 0 } = process.env ?? {}
|
||||
* If the `cache` store is not available, the middleware will skip its logic.
|
||||
*
|
||||
* @function
|
||||
* @param {ServerRequest} req - Express request object containing user information.
|
||||
* @param {Express.Request} req - Express request object containing user information.
|
||||
* @param {Express.Response} res - Express response object.
|
||||
* @param {function} next - Express next middleware function.
|
||||
* @throws {Error} Throws an error if the user doesn't have access to the conversation.
|
||||
|
||||
@@ -6,7 +6,7 @@ const { logViolation } = require('~/cache');
|
||||
* Validates the model of the request.
|
||||
*
|
||||
* @async
|
||||
* @param {ServerRequest} req - The Express request object.
|
||||
* @param {Express.Request} req - The Express request object.
|
||||
* @param {Express.Response} res - The Express response object.
|
||||
* @param {Function} next - The Express next function.
|
||||
*/
|
||||
|
||||
@@ -1,17 +1,15 @@
|
||||
const express = require('express');
|
||||
const { MongoMemoryServer } = require('mongodb-memory-server');
|
||||
const request = require('supertest');
|
||||
const mongoose = require('mongoose');
|
||||
const { MongoMemoryServer } = require('mongodb-memory-server');
|
||||
const express = require('express');
|
||||
|
||||
jest.mock('@librechat/api', () => ({
|
||||
...jest.requireActual('@librechat/api'),
|
||||
MCPOAuthHandler: {
|
||||
initiateOAuthFlow: jest.fn(),
|
||||
getFlowState: jest.fn(),
|
||||
completeOAuthFlow: jest.fn(),
|
||||
generateFlowId: jest.fn(),
|
||||
},
|
||||
getUserMCPAuthMap: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('@librechat/data-schemas', () => ({
|
||||
@@ -38,7 +36,6 @@ jest.mock('~/models', () => ({
|
||||
updateToken: jest.fn(),
|
||||
createToken: jest.fn(),
|
||||
deleteTokens: jest.fn(),
|
||||
findPluginAuthsByKeys: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('~/server/services/Config', () => ({
|
||||
@@ -47,10 +44,6 @@ jest.mock('~/server/services/Config', () => ({
|
||||
loadCustomConfig: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('~/server/services/Config/mcpToolsCache', () => ({
|
||||
updateMCPUserTools: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('~/server/services/MCP', () => ({
|
||||
getMCPSetupData: jest.fn(),
|
||||
getServerConnectionStatus: jest.fn(),
|
||||
@@ -73,10 +66,6 @@ jest.mock('~/server/middleware', () => ({
|
||||
requireJwtAuth: (req, res, next) => next(),
|
||||
}));
|
||||
|
||||
jest.mock('~/server/services/Tools/mcp', () => ({
|
||||
reinitMCPServer: jest.fn(),
|
||||
}));
|
||||
|
||||
describe('MCP Routes', () => {
|
||||
let app;
|
||||
let mongoServer;
|
||||
@@ -688,13 +677,6 @@ describe('MCP Routes', () => {
|
||||
require('~/config').getMCPManager.mockReturnValue(mockMcpManager);
|
||||
require('~/config').getFlowStateManager.mockReturnValue({});
|
||||
require('~/cache').getLogStores.mockReturnValue({});
|
||||
require('~/server/services/Tools/mcp').reinitMCPServer.mockResolvedValue({
|
||||
success: true,
|
||||
message: "MCP server 'oauth-server' ready for OAuth authentication",
|
||||
serverName: 'oauth-server',
|
||||
oauthRequired: true,
|
||||
oauthUrl: 'https://oauth.example.com/auth',
|
||||
});
|
||||
|
||||
const response = await request(app).post('/api/mcp/oauth-server/reinitialize');
|
||||
|
||||
@@ -719,7 +701,6 @@ describe('MCP Routes', () => {
|
||||
require('~/config').getMCPManager.mockReturnValue(mockMcpManager);
|
||||
require('~/config').getFlowStateManager.mockReturnValue({});
|
||||
require('~/cache').getLogStores.mockReturnValue({});
|
||||
require('~/server/services/Tools/mcp').reinitMCPServer.mockResolvedValue(null);
|
||||
|
||||
const response = await request(app).post('/api/mcp/error-server/reinitialize');
|
||||
|
||||
@@ -778,18 +759,8 @@ describe('MCP Routes', () => {
|
||||
require('~/cache').getLogStores.mockReturnValue({});
|
||||
|
||||
const { getCachedTools, setCachedTools } = require('~/server/services/Config');
|
||||
const { updateMCPUserTools } = require('~/server/services/Config/mcpToolsCache');
|
||||
getCachedTools.mockResolvedValue({});
|
||||
setCachedTools.mockResolvedValue();
|
||||
updateMCPUserTools.mockResolvedValue();
|
||||
|
||||
require('~/server/services/Tools/mcp').reinitMCPServer.mockResolvedValue({
|
||||
success: true,
|
||||
message: "MCP server 'test-server' reinitialized successfully",
|
||||
serverName: 'test-server',
|
||||
oauthRequired: false,
|
||||
oauthUrl: null,
|
||||
});
|
||||
|
||||
const response = await request(app).post('/api/mcp/test-server/reinitialize');
|
||||
|
||||
@@ -805,6 +776,7 @@ describe('MCP Routes', () => {
|
||||
'test-user-id',
|
||||
'test-server',
|
||||
);
|
||||
expect(setCachedTools).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle server with custom user variables', async () => {
|
||||
@@ -826,38 +798,21 @@ describe('MCP Routes', () => {
|
||||
require('~/config').getMCPManager.mockReturnValue(mockMcpManager);
|
||||
require('~/config').getFlowStateManager.mockReturnValue({});
|
||||
require('~/cache').getLogStores.mockReturnValue({});
|
||||
require('@librechat/api').getUserMCPAuthMap.mockResolvedValue({
|
||||
'mcp:test-server': {
|
||||
API_KEY: 'api-key-value',
|
||||
},
|
||||
});
|
||||
require('~/models').findPluginAuthsByKeys.mockResolvedValue([
|
||||
{ key: 'API_KEY', value: 'api-key-value' },
|
||||
]);
|
||||
require('~/server/services/PluginService').getUserPluginAuthValue.mockResolvedValue(
|
||||
'api-key-value',
|
||||
);
|
||||
|
||||
const { getCachedTools, setCachedTools } = require('~/server/services/Config');
|
||||
const { updateMCPUserTools } = require('~/server/services/Config/mcpToolsCache');
|
||||
getCachedTools.mockResolvedValue({});
|
||||
setCachedTools.mockResolvedValue();
|
||||
updateMCPUserTools.mockResolvedValue();
|
||||
|
||||
require('~/server/services/Tools/mcp').reinitMCPServer.mockResolvedValue({
|
||||
success: true,
|
||||
message: "MCP server 'test-server' reinitialized successfully",
|
||||
serverName: 'test-server',
|
||||
oauthRequired: false,
|
||||
oauthUrl: null,
|
||||
});
|
||||
|
||||
const response = await request(app).post('/api/mcp/test-server/reinitialize');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.success).toBe(true);
|
||||
expect(require('@librechat/api').getUserMCPAuthMap).toHaveBeenCalledWith({
|
||||
userId: 'test-user-id',
|
||||
servers: ['test-server'],
|
||||
findPluginAuthsByKeys: require('~/models').findPluginAuthsByKeys,
|
||||
});
|
||||
expect(
|
||||
require('~/server/services/PluginService').getUserPluginAuthValue,
|
||||
).toHaveBeenCalledWith('test-user-id', 'API_KEY', false);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -16,6 +16,7 @@ const { getAgent, updateAgent, getListAgentsByAccess } = require('~/models/Agent
|
||||
const { updateAction, getActions, deleteAction } = require('~/models/Action');
|
||||
const { isActionDomainAllowed } = require('~/server/services/domains');
|
||||
const { canAccessAgentResource } = require('~/server/middleware');
|
||||
const { getAppConfig } = require('~/server/services/Config/app');
|
||||
const { getRoleByName } = require('~/models/Role');
|
||||
|
||||
const router = express.Router();
|
||||
@@ -83,7 +84,7 @@ router.post(
|
||||
}
|
||||
|
||||
let metadata = await encryptMetadata(removeNullishValues(_metadata, true));
|
||||
const appConfig = req.config;
|
||||
const appConfig = await getAppConfig({ role: req.user.role });
|
||||
const isDomainAllowed = await isActionDomainAllowed(
|
||||
metadata.domain,
|
||||
appConfig?.actions?.allowedDomains,
|
||||
|
||||
@@ -4,7 +4,6 @@ const {
|
||||
checkBan,
|
||||
requireJwtAuth,
|
||||
messageIpLimiter,
|
||||
configMiddleware,
|
||||
concurrentLimiter,
|
||||
messageUserLimiter,
|
||||
} = require('~/server/middleware');
|
||||
@@ -23,8 +22,6 @@ router.use(uaParser);
|
||||
router.use('/', v1);
|
||||
|
||||
const chatRouter = express.Router();
|
||||
chatRouter.use(configMiddleware);
|
||||
|
||||
if (isEnabled(LIMIT_CONCURRENT_MESSAGES)) {
|
||||
chatRouter.use(concurrentLimiter);
|
||||
}
|
||||
@@ -40,4 +37,6 @@ if (isEnabled(LIMIT_MESSAGE_USER)) {
|
||||
chatRouter.use('/', chat);
|
||||
router.use('/chat', chatRouter);
|
||||
|
||||
// Add marketplace routes
|
||||
|
||||
module.exports = router;
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
const express = require('express');
|
||||
const { callTool, verifyToolAuth, getToolCalls } = require('~/server/controllers/tools');
|
||||
const { getAvailableTools } = require('~/server/controllers/PluginController');
|
||||
const { toolCallLimiter } = require('~/server/middleware');
|
||||
const { toolCallLimiter } = require('~/server/middleware/limiters');
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
const express = require('express');
|
||||
const { generateCheckAccess } = require('@librechat/api');
|
||||
const { PermissionTypes, Permissions, PermissionBits } = require('librechat-data-provider');
|
||||
const { requireJwtAuth, configMiddleware, canAccessAgentResource } = require('~/server/middleware');
|
||||
const { requireJwtAuth, canAccessAgentResource } = require('~/server/middleware');
|
||||
const v1 = require('~/server/controllers/agents/v1');
|
||||
const { getRoleByName } = require('~/models/Role');
|
||||
const actions = require('./actions');
|
||||
@@ -36,17 +36,17 @@ router.use(requireJwtAuth);
|
||||
* Agent actions route.
|
||||
* @route GET|POST /agents/actions
|
||||
*/
|
||||
router.use('/actions', configMiddleware, actions);
|
||||
router.use('/actions', actions);
|
||||
|
||||
/**
|
||||
* Get a list of available tools for agents.
|
||||
* @route GET /agents/tools
|
||||
*/
|
||||
router.use('/tools', configMiddleware, tools);
|
||||
router.use('/tools', tools);
|
||||
|
||||
/**
|
||||
* Get all agent categories with counts
|
||||
* @route GET /agents/categories
|
||||
* @route GET /agents/marketplace/categories
|
||||
*/
|
||||
router.get('/categories', v1.getAgentCategories);
|
||||
/**
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
const express = require('express');
|
||||
const { nanoid } = require('nanoid');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { actionDelimiter, EModelEndpoint, removeNullishValues } = require('librechat-data-provider');
|
||||
const { encryptMetadata, domainParser } = require('~/server/services/ActionService');
|
||||
const { getOpenAIClient } = require('~/server/controllers/assistants/helpers');
|
||||
const { updateAction, getActions, deleteAction } = require('~/models/Action');
|
||||
const { updateAssistantDoc, getAssistant } = require('~/models/Assistant');
|
||||
const { isActionDomainAllowed } = require('~/server/services/domains');
|
||||
const { getAppConfig } = require('~/server/services/Config');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
@@ -21,7 +22,7 @@ const router = express.Router();
|
||||
*/
|
||||
router.post('/:assistant_id', async (req, res) => {
|
||||
try {
|
||||
const appConfig = req.config;
|
||||
const appConfig = await getAppConfig({ role: req.user?.role });
|
||||
const { assistant_id } = req.params;
|
||||
|
||||
/** @type {{ functions: FunctionTool[], action_id: string, metadata: ActionMetadata }} */
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
const express = require('express');
|
||||
const { uaParser, checkBan, requireJwtAuth, configMiddleware } = require('~/server/middleware');
|
||||
const router = express.Router();
|
||||
const { uaParser, checkBan, requireJwtAuth } = require('~/server/middleware');
|
||||
|
||||
const { v1 } = require('./v1');
|
||||
const chatV1 = require('./chatV1');
|
||||
@@ -10,7 +10,6 @@ const chatV2 = require('./chatV2');
|
||||
router.use(requireJwtAuth);
|
||||
router.use(checkBan);
|
||||
router.use(uaParser);
|
||||
router.use(configMiddleware);
|
||||
router.use('/v1/', v1);
|
||||
router.use('/v1/chat', chatV1);
|
||||
router.use('/v2/', v2);
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
const express = require('express');
|
||||
const { configMiddleware } = require('~/server/middleware');
|
||||
const v1 = require('~/server/controllers/assistants/v1');
|
||||
const v2 = require('~/server/controllers/assistants/v2');
|
||||
const documents = require('./documents');
|
||||
@@ -7,7 +6,6 @@ const actions = require('./actions');
|
||||
const tools = require('./tools');
|
||||
|
||||
const router = express.Router();
|
||||
router.use(configMiddleware);
|
||||
|
||||
/**
|
||||
* Assistant actions route.
|
||||
|
||||
@@ -17,12 +17,12 @@ const {
|
||||
const { verify2FAWithTempToken } = require('~/server/controllers/auth/TwoFactorAuthController');
|
||||
const { logoutController } = require('~/server/controllers/auth/LogoutController');
|
||||
const { loginController } = require('~/server/controllers/auth/LoginController');
|
||||
const { getAppConfig } = require('~/server/services/Config');
|
||||
const { getBalanceConfig } = require('~/server/services/Config');
|
||||
const middleware = require('~/server/middleware');
|
||||
const { Balance } = require('~/db/models');
|
||||
|
||||
const setBalanceConfig = createSetBalanceConfig({
|
||||
getAppConfig,
|
||||
getBalanceConfig,
|
||||
Balance,
|
||||
});
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
const express = require('express');
const { isEnabled } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { isEnabled, getBalanceConfig } = require('@librechat/api');
const {
Constants,
CacheKeys,
@@ -62,8 +62,6 @@ router.get('/', async function (req, res) {
!!process.env.SAML_CERT &&
!!process.env.SAML_SESSION_SECRET;

const balanceConfig = getBalanceConfig(appConfig);

/** @type {TStartupConfig} */
const payload = {
appTitle: process.env.APP_TITLE || 'LibreChat',
@@ -103,7 +101,7 @@ router.get('/', async function (req, res) {
interface: appConfig?.interfaceConfig,
turnstile: appConfig?.turnstileConfig,
modelSpecs: appConfig?.modelSpecs,
balance: balanceConfig,
balance: appConfig?.balance,
sharedLinksEnabled,
publicSharedLinksEnabled,
analyticsGtmId: process.env.ANALYTICS_GTM_ID,
@@ -117,17 +115,9 @@ router.get('/', async function (req, res) {
openidReuseTokens,
};

const minPasswordLength = parseInt(process.env.MIN_PASSWORD_LENGTH, 10);
if (minPasswordLength && !isNaN(minPasswordLength)) {
payload.minPasswordLength = minPasswordLength;
}

payload.mcpServers = {};
const getMCPServers = () => {
try {
if (appConfig?.mcpConfig == null) {
return;
}
const mcpManager = getMCPManager();
if (!mcpManager) {
return;
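This hunk swaps a derived balanceConfig for reading appConfig?.balance directly in the startup payload. The dropped helper presumably layered defaults over the configured balance block; a sketch of that assumed behavior (the defaults and field names are illustrative, not the library's actual contract):

const defaults = { enabled: false, startBalance: 0 };

function getBalanceConfig(appConfig) {
  // Merge configured values over the defaults; missing config yields the defaults alone.
  return { ...defaults, ...(appConfig?.balance ?? {}) };
}

const appConfig = { balance: { enabled: true, startBalance: 20000 } };
console.log(getBalanceConfig(appConfig)); // { enabled: true, startBalance: 20000 }
console.log(getBalanceConfig(undefined)); // { enabled: false, startBalance: 0 }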
||||
|
||||
@@ -4,13 +4,9 @@ const { sleep } = require('@librechat/agents');
|
||||
const { isEnabled } = require('@librechat/api');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { CacheKeys, EModelEndpoint } = require('librechat-data-provider');
|
||||
const {
|
||||
createImportLimiters,
|
||||
createForkLimiters,
|
||||
configMiddleware,
|
||||
} = require('~/server/middleware');
|
||||
const { getConvosByCursor, deleteConvos, getConvo, saveConvo } = require('~/models/Conversation');
|
||||
const { forkConversation, duplicateConversation } = require('~/server/utils/import/fork');
|
||||
const { createImportLimiters, createForkLimiters } = require('~/server/middleware');
|
||||
const { storage, importFileFilter } = require('~/server/routes/files/multer');
|
||||
const requireJwtAuth = require('~/server/middleware/requireJwtAuth');
|
||||
const { importConversations } = require('~/server/utils/import');
|
||||
@@ -175,7 +171,6 @@ router.post(
|
||||
'/import',
|
||||
importIpLimiter,
|
||||
importUserLimiter,
|
||||
configMiddleware,
|
||||
upload.single('file'),
|
||||
async (req, res) => {
|
||||
try {
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
const express = require('express');
|
||||
const endpointController = require('~/server/controllers/EndpointController');
|
||||
|
||||
const router = express.Router();
|
||||
const endpointController = require('~/server/controllers/EndpointController');
|
||||
const overrideController = require('~/server/controllers/OverrideController');
|
||||
|
||||
router.get('/', endpointController);
|
||||
router.get('/config/override', overrideController);
|
||||
|
||||
module.exports = router;
|
||||
|
||||
@@ -1,16 +1,17 @@
|
||||
const fs = require('fs').promises;
|
||||
const express = require('express');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
|
||||
const { resizeAvatar } = require('~/server/services/Files/images/avatar');
|
||||
const { getFileStrategy } = require('~/server/utils/getFileStrategy');
|
||||
const { filterFile } = require('~/server/services/Files/process');
|
||||
const { getAppConfig } = require('~/server/services/Config');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
router.post('/', async (req, res) => {
|
||||
try {
|
||||
const appConfig = req.config;
|
||||
const appConfig = await getAppConfig({ role: req.user?.role });
|
||||
filterFile({ req, file: req.file, image: true, isAvatar: true });
|
||||
const userId = req.user.id;
|
||||
const { manual } = req.body;
|
||||
|
||||
@@ -26,18 +26,18 @@ const { loadAuthValues } = require('~/server/services/Tools/credentials');
|
||||
const { refreshS3FileUrls } = require('~/server/services/Files/S3/crud');
|
||||
const { hasAccessToFilesViaAgent } = require('~/server/services/Files');
|
||||
const { getFiles, batchUpdateFiles } = require('~/models/File');
|
||||
const { getAppConfig } = require('~/server/services/Config');
|
||||
const { cleanFileName } = require('~/server/utils/files');
|
||||
const { getAssistant } = require('~/models/Assistant');
|
||||
const { getAgent } = require('~/models/Agent');
|
||||
const { getLogStores } = require('~/cache');
|
||||
const { logger } = require('~/config');
|
||||
const { Readable } = require('stream');
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
router.get('/', async (req, res) => {
|
||||
try {
|
||||
const appConfig = req.config;
|
||||
const appConfig = await getAppConfig({ role: req.user?.role });
|
||||
const files = await getFiles({ user: req.user.id });
|
||||
if (appConfig.fileStrategy === FileSources.s3) {
|
||||
try {
|
||||
@@ -116,7 +116,7 @@ router.get('/agent/:agent_id', async (req, res) => {
|
||||
|
||||
router.get('/config', async (req, res) => {
|
||||
try {
|
||||
const appConfig = req.config;
|
||||
const appConfig = await getAppConfig({ role: req.user?.role });
|
||||
res.status(200).json(appConfig.fileConfig);
|
||||
} catch (error) {
|
||||
logger.error('[/files] Error getting fileConfig', error);
|
||||
@@ -185,7 +185,6 @@ router.delete('/', async (req, res) => {
|
||||
role: req.user.role,
|
||||
fileIds: nonOwnedFileIds,
|
||||
agentId: req.body.agent_id,
|
||||
isDelete: true,
|
||||
});
|
||||
|
||||
for (const file of nonOwnedFiles) {
|
||||
@@ -327,6 +326,11 @@ router.get('/download/:userId/:file_id', fileAccess, async (req, res) => {
res.setHeader('X-File-Metadata', JSON.stringify(file));
};

/** @type {{ body: import('stream').PassThrough } | undefined} */
let passThrough;
/** @type {ReadableStream | undefined} */
let fileStream;

if (checkOpenAIStorage(file.source)) {
req.body = { model: file.model };
const endpointMap = {
@@ -339,19 +343,12 @@ router.get('/download/:userId/:file_id', fileAccess, async (req, res) => {
overrideEndpoint: endpointMap[file.source],
});
logger.debug(`Downloading file ${file_id} from OpenAI`);
const passThrough = await getDownloadStream(file_id, openai);
passThrough = await getDownloadStream(file_id, openai);
setHeaders();
logger.debug(`File ${file_id} downloaded from OpenAI`);

// Handle both Node.js and Web streams
const stream =
passThrough.body && typeof passThrough.body.getReader === 'function'
? Readable.fromWeb(passThrough.body)
: passThrough.body;

stream.pipe(res);
passThrough.body.pipe(res);
} else {
const fileStream = await getDownloadStream(req, file.filepath);
fileStream = await getDownloadStream(req, file.filepath);

fileStream.on('error', (streamError) => {
logger.error('[DOWNLOAD ROUTE] Stream error:', streamError);
@@ -391,8 +388,7 @@ router.post('/', async (req, res) => {

if (
error.message?.includes('Invalid file format') ||
error.message?.includes('No OCR result') ||
error.message?.includes('exceeds token limit')
error.message?.includes('No OCR result')
) {
message = error.message;
}
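One side of this hunk normalizes the download body before piping, since fetch-style responses expose a Web ReadableStream while local downloads return a Node stream. A sketch of that normalization; it requires a Node version that provides Readable.fromWeb, and pipeDownload's arguments are assumptions for illustration:

const { Readable } = require('stream');

function toNodeStream(body) {
  // Web ReadableStream instances expose getReader(); Node streams do not.
  return body && typeof body.getReader === 'function' ? Readable.fromWeb(body) : body;
}

function pipeDownload(response, res) {
  const stream = toNodeStream(response.body);
  stream.pipe(res);
}

module.exports = { toNodeStream, pipeDownload };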
||||
|
||||
@@ -1,19 +1,20 @@
|
||||
const path = require('path');
|
||||
const fs = require('fs').promises;
|
||||
const express = require('express');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { isAgentsEndpoint } = require('librechat-data-provider');
|
||||
const {
|
||||
filterFile,
|
||||
processImageFile,
|
||||
processAgentFileUpload,
|
||||
} = require('~/server/services/Files/process');
|
||||
const { getAppConfig } = require('~/server/services/Config');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
router.post('/', async (req, res) => {
|
||||
const appConfig = await getAppConfig({ role: req.user?.role });
|
||||
const metadata = req.body;
|
||||
const appConfig = req.config;
|
||||
|
||||
try {
|
||||
filterFile({ req, image: true });
|
||||
@@ -29,17 +30,6 @@ router.post('/', async (req, res) => {
|
||||
} catch (error) {
|
||||
// TODO: delete remote file if it exists
|
||||
logger.error('[/files/images] Error processing file:', error);
|
||||
|
||||
let message = 'Error processing file';
|
||||
|
||||
if (
|
||||
error.message?.includes('Invalid file format') ||
|
||||
error.message?.includes('No OCR result') ||
|
||||
error.message?.includes('exceeds token limit')
|
||||
) {
|
||||
message = error.message;
|
||||
}
|
||||
|
||||
try {
|
||||
const filepath = path.join(
|
||||
appConfig.paths.imageOutput,
|
||||
@@ -50,7 +40,7 @@ router.post('/', async (req, res) => {
|
||||
} catch (error) {
|
||||
logger.error('[/files/images] Error deleting file:', error);
|
||||
}
|
||||
res.status(500).json({ message });
|
||||
res.status(500).json({ message: 'Error processing file' });
|
||||
} finally {
|
||||
try {
|
||||
await fs.unlink(req.file.path);
|
||||
|
||||
@@ -1,11 +1,5 @@
|
||||
const express = require('express');
|
||||
const {
|
||||
createFileLimiters,
|
||||
configMiddleware,
|
||||
requireJwtAuth,
|
||||
uaParser,
|
||||
checkBan,
|
||||
} = require('~/server/middleware');
|
||||
const { uaParser, checkBan, requireJwtAuth, createFileLimiters } = require('~/server/middleware');
|
||||
const { avatar: asstAvatarRouter } = require('~/server/routes/assistants/v1');
|
||||
const { avatar: agentAvatarRouter } = require('~/server/routes/agents/v1');
|
||||
const { createMulterInstance } = require('./multer');
|
||||
@@ -18,7 +12,6 @@ const speech = require('./speech');
|
||||
const initialize = async () => {
|
||||
const router = express.Router();
|
||||
router.use(requireJwtAuth);
|
||||
router.use(configMiddleware);
|
||||
router.use(checkBan);
|
||||
router.use(uaParser);
|
||||
|
||||
|
||||
@@ -8,12 +8,17 @@ const { getAppConfig } = require('~/server/services/Config');

const storage = multer.diskStorage({
destination: function (req, file, cb) {
const appConfig = req.config;
const outputPath = path.join(appConfig.paths.uploads, 'temp', req.user.id);
if (!fs.existsSync(outputPath)) {
fs.mkdirSync(outputPath, { recursive: true });
}
cb(null, outputPath);
getAppConfig({ role: req.user?.role })
.then((appConfig) => {
const outputPath = path.join(appConfig.paths.uploads, 'temp', req.user.id);
if (!fs.existsSync(outputPath)) {
fs.mkdirSync(outputPath, { recursive: true });
}
cb(null, outputPath);
})
.catch((error) => {
cb(error);
});
},
filename: function (req, file, cb) {
req.file_id = crypto.randomUUID();
||||
|
||||
@@ -24,11 +24,6 @@ describe('Multer Configuration', () => {
user: { id: 'test-user-123' },
body: {},
originalUrl: '/api/files/upload',
config: {
paths: {
uploads: tempDir,
},
},
};

mockFile = {
@@ -37,6 +32,14 @@ describe('Multer Configuration', () => {
size: 1024,
};

// Mock getAppConfig to return paths
const { getAppConfig } = require('~/server/services/Config');
getAppConfig.mockResolvedValue({
paths: {
uploads: tempDir,
},
});

// Clear mocks
jest.clearAllMocks();
});
@@ -63,7 +66,12 @@ describe('Multer Configuration', () => {

it("should create directory recursively if it doesn't exist", (done) => {
const deepPath = path.join(tempDir, 'deep', 'nested', 'path');
mockReq.config.paths.uploads = deepPath;
const { getAppConfig } = require('~/server/services/Config');
getAppConfig.mockResolvedValue({
paths: {
uploads: deepPath,
},
});

const cb = jest.fn((err, destination) => {
expect(err).toBeNull();
@@ -446,15 +454,27 @@ describe('Multer Configuration', () => {
}).not.toThrow();
});

it('should handle file system errors when directory creation fails', () => {
it('should handle file system errors when directory creation fails', (done) => {
// Test with a non-existent parent directory to simulate fs issues
const invalidPath = '/nonexistent/path/that/should/not/exist';
mockReq.config.paths.uploads = invalidPath;
const { getAppConfig } = require('~/server/services/Config');
getAppConfig.mockResolvedValue({
paths: {
uploads: invalidPath,
},
});

// The current implementation doesn't catch errors, so they're thrown synchronously
expect(() => {
storage.getDestination(mockReq, mockFile, jest.fn());
}).toThrow();
// Call getDestination which should fail due to permission/path issues
storage.getDestination(mockReq, mockFile, (err, destination) => {
// Now we expect the error to be passed to the callback
expect(err).toBeDefined();
// This is the expected behavior - mkdirSync throws synchronously for invalid paths
// On Linux, this typically returns EACCES (permission denied)
// On macOS/Darwin, this returns ENOENT (no such file or directory)
expect(['EACCES', 'ENOENT']).toContain(err.code);
expect(destination).toBeUndefined();
done();
});
});

it('should handle malformed filenames with real sanitization', (done) => {

@@ -1,15 +1,13 @@
const { Router } = require('express');
const { logger } = require('@librechat/data-schemas');
const { MCPOAuthHandler, getUserMCPAuthMap } = require('@librechat/api');
const { MCPOAuthHandler } = require('@librechat/api');
const { Router } = require('express');
const { getMCPSetupData, getServerConnectionStatus } = require('~/server/services/MCP');
const { findToken, updateToken, createToken, deleteTokens } = require('~/models');
const { updateMCPUserTools } = require('~/server/services/Config/mcpToolsCache');
const { setCachedTools, getCachedTools } = require('~/server/services/Config');
const { getUserPluginAuthValue } = require('~/server/services/PluginService');
const { CacheKeys, Constants } = require('librechat-data-provider');
const { getMCPManager, getFlowStateManager } = require('~/config');
const { reinitMCPServer } = require('~/server/services/Tools/mcp');
const { requireJwtAuth } = require('~/server/middleware');
const { findPluginAuthsByKeys } = require('~/models');
const { getLogStores } = require('~/cache');

const router = Router();
@@ -144,12 +142,33 @@ router.get('/:serverName/oauth/callback', async (req, res) => {
`[MCP OAuth] Successfully reconnected ${serverName} for user ${flowState.userId}`,
);

const userTools = (await getCachedTools({ userId: flowState.userId })) || {};

const mcpDelimiter = Constants.mcp_delimiter;
for (const key of Object.keys(userTools)) {
if (key.endsWith(`${mcpDelimiter}${serverName}`)) {
delete userTools[key];
}
}

const tools = await userConnection.fetchTools();
await updateMCPUserTools({
userId: flowState.userId,
serverName,
tools,
});
for (const tool of tools) {
const name = `${tool.name}${Constants.mcp_delimiter}${serverName}`;
userTools[name] = {
type: 'function',
['function']: {
name,
description: tool.description,
parameters: tool.inputSchema,
},
};
}

await setCachedTools(userTools, { userId: flowState.userId });

logger.debug(
`[MCP OAuth] Cached ${tools.length} tools for ${serverName} user ${flowState.userId}`,
);
} else {
logger.debug(`[MCP OAuth] System-level OAuth completed for ${serverName}`);
}
@@ -304,39 +323,124 @@ router.post('/:serverName/reinitialize', requireJwtAuth, async (req, res) => {
});
}

const flowsCache = getLogStores(CacheKeys.FLOWS);
const flowManager = getFlowStateManager(flowsCache);

await mcpManager.disconnectUserConnection(user.id, serverName);
logger.info(
`[MCP Reinitialize] Disconnected existing user connection for server: ${serverName}`,
);

/** @type {Record<string, Record<string, string>> | undefined} */
let userMCPAuthMap;
let customUserVars = {};
if (serverConfig.customUserVars && typeof serverConfig.customUserVars === 'object') {
userMCPAuthMap = await getUserMCPAuthMap({
userId: user.id,
servers: [serverName],
findPluginAuthsByKeys,
for (const varName of Object.keys(serverConfig.customUserVars)) {
try {
const value = await getUserPluginAuthValue(user.id, varName, false);
customUserVars[varName] = value;
} catch (err) {
logger.error(`[MCP Reinitialize] Error fetching ${varName} for user ${user.id}:`, err);
}
}
}

let userConnection = null;
let oauthRequired = false;
let oauthUrl = null;

try {
userConnection = await mcpManager.getUserConnection({
user,
serverName,
flowManager,
customUserVars,
tokenMethods: {
findToken,
updateToken,
createToken,
deleteTokens,
},
returnOnOAuth: true,
oauthStart: async (authURL) => {
logger.info(`[MCP Reinitialize] OAuth URL received: ${authURL}`);
oauthUrl = authURL;
oauthRequired = true;
},
});

logger.info(`[MCP Reinitialize] Successfully established connection for ${serverName}`);
} catch (err) {
logger.info(`[MCP Reinitialize] getUserConnection threw error: ${err.message}`);
logger.info(
`[MCP Reinitialize] OAuth state - oauthRequired: ${oauthRequired}, oauthUrl: ${oauthUrl ? 'present' : 'null'}`,
);

const isOAuthError =
err.message?.includes('OAuth') ||
err.message?.includes('authentication') ||
err.message?.includes('401');

const isOAuthFlowInitiated = err.message === 'OAuth flow initiated - return early';

if (isOAuthError || oauthRequired || isOAuthFlowInitiated) {
logger.info(
`[MCP Reinitialize] OAuth required for ${serverName} (isOAuthError: ${isOAuthError}, oauthRequired: ${oauthRequired}, isOAuthFlowInitiated: ${isOAuthFlowInitiated})`,
);
oauthRequired = true;
} else {
logger.error(
`[MCP Reinitialize] Error initializing MCP server ${serverName} for user:`,
err,
);
return res.status(500).json({ error: 'Failed to reinitialize MCP server for user' });
}
}

const result = await reinitMCPServer({
req,
serverName,
userMCPAuthMap,
});
if (userConnection && !oauthRequired) {
const userTools = (await getCachedTools({ userId: user.id })) || {};

if (!result) {
return res.status(500).json({ error: 'Failed to reinitialize MCP server for user' });
const mcpDelimiter = Constants.mcp_delimiter;
for (const key of Object.keys(userTools)) {
if (key.endsWith(`${mcpDelimiter}${serverName}`)) {
delete userTools[key];
}
}

const tools = await userConnection.fetchTools();
for (const tool of tools) {
const name = `${tool.name}${Constants.mcp_delimiter}${serverName}`;
userTools[name] = {
type: 'function',
['function']: {
name,
description: tool.description,
parameters: tool.inputSchema,
},
};
}

await setCachedTools(userTools, { userId: user.id });
}

const { success, message, oauthRequired, oauthUrl } = result;
logger.debug(
`[MCP Reinitialize] Sending response for ${serverName} - oauthRequired: ${oauthRequired}, oauthUrl: ${oauthUrl ? 'present' : 'null'}`,
);

const getResponseMessage = () => {
if (oauthRequired) {
return `MCP server '${serverName}' ready for OAuth authentication`;
}
if (userConnection) {
return `MCP server '${serverName}' reinitialized successfully`;
}
return `Failed to reinitialize MCP server '${serverName}'`;
};

res.json({
success,
message,
oauthUrl,
success: Boolean((userConnection && !oauthRequired) || (oauthRequired && oauthUrl)),
message: getResponseMessage(),
serverName,
oauthRequired,
oauthUrl,
});
} catch (error) {
logger.error('[MCP Reinitialize] Unexpected error', error);

@@ -8,7 +8,8 @@ const {
deleteMemory,
setMemory,
} = require('~/models');
const { requireJwtAuth, configMiddleware } = require('~/server/middleware');
const { getAppConfig } = require('~/server/services/Config');
const { requireJwtAuth } = require('~/server/middleware');
const { getRoleByName } = require('~/models/Role');

const router = express.Router();
@@ -48,7 +49,7 @@ router.use(requireJwtAuth);
* Returns all memories for the authenticated user, sorted by updated_at (newest first).
* Also includes memory usage percentage based on token limit.
*/
router.get('/', checkMemoryRead, configMiddleware, async (req, res) => {
router.get('/', checkMemoryRead, async (req, res) => {
try {
const memories = await getAllUserMemories(req.user.id);

@@ -60,7 +61,7 @@ router.get('/', checkMemoryRead, configMiddleware, async (req, res) => {
return sum + (memory.tokenCount || 0);
}, 0);

const appConfig = req.config;
const appConfig = await getAppConfig({ role: req.user?.role });
const memoryConfig = appConfig?.memory;
const tokenLimit = memoryConfig?.tokenLimit;
const charLimit = memoryConfig?.charLimit || 10000;
@@ -88,7 +89,7 @@ router.get('/', checkMemoryRead, configMiddleware, async (req, res) => {
* Body: { key: string, value: string }
* Returns 201 and { created: true, memory: <createdDoc> } when successful.
*/
router.post('/', memoryPayloadLimit, checkMemoryCreate, configMiddleware, async (req, res) => {
router.post('/', memoryPayloadLimit, checkMemoryCreate, async (req, res) => {
const { key, value } = req.body;

if (typeof key !== 'string' || key.trim() === '') {
@@ -99,7 +100,7 @@ router.post('/', memoryPayloadLimit, checkMemoryCreate, configMiddleware, async
return res.status(400).json({ error: 'Value is required and must be a non-empty string.' });
}

const appConfig = req.config;
const appConfig = await getAppConfig({ role: req.user?.role });
const memoryConfig = appConfig?.memory;
const charLimit = memoryConfig?.charLimit || 10000;

@@ -120,7 +121,7 @@ router.post('/', memoryPayloadLimit, checkMemoryCreate, configMiddleware, async

const memories = await getAllUserMemories(req.user.id);

const appConfig = req.config;
const appConfig = await getAppConfig({ role: req.user?.role });
const memoryConfig = appConfig?.memory;
const tokenLimit = memoryConfig?.tokenLimit;

@@ -196,7 +197,7 @@ router.patch('/preferences', checkMemoryOptOut, async (req, res) => {
* Body: { key?: string, value: string }
* Returns 200 and { updated: true, memory: <updatedDoc> } when successful.
*/
router.patch('/:key', memoryPayloadLimit, checkMemoryUpdate, configMiddleware, async (req, res) => {
router.patch('/:key', memoryPayloadLimit, checkMemoryUpdate, async (req, res) => {
const { key: urlKey } = req.params;
const { key: bodyKey, value } = req.body || {};

@@ -205,7 +206,7 @@ router.patch('/:key', memoryPayloadLimit, checkMemoryUpdate, configMiddleware, a
}

const newKey = bodyKey || urlKey;
const appConfig = req.config;
const appConfig = await getAppConfig({ role: req.user?.role });
const memoryConfig = appConfig?.memory;
const charLimit = memoryConfig?.charLimit || 10000;

@@ -8,11 +8,11 @@ const { isEnabled, createSetBalanceConfig } = require('@librechat/api');
const { checkDomainAllowed, loginLimiter, logHeaders, checkBan } = require('~/server/middleware');
const { syncUserEntraGroupMemberships } = require('~/server/services/PermissionService');
const { setAuthTokens, setOpenIDAuthTokens } = require('~/server/services/AuthService');
const { getAppConfig } = require('~/server/services/Config');
const { getBalanceConfig } = require('~/server/services/Config');
const { Balance } = require('~/db/models');

const setBalanceConfig = createSetBalanceConfig({
getAppConfig,
getBalanceConfig,
Balance,
});

@@ -1,6 +1,6 @@
const express = require('express');
const { getAvailablePluginsController } = require('~/server/controllers/PluginController');
const { requireJwtAuth } = require('~/server/middleware');
const { getAvailablePluginsController } = require('../controllers/PluginController');
const requireJwtAuth = require('../middleware/requireJwtAuth');

const router = express.Router();

@@ -156,7 +156,7 @@ router.get('/all', async (req, res) => {
router.get('/groups', async (req, res) => {
try {
const userId = req.user.id;
const { pageSize, limit, cursor, name, category, ...otherFilters } = req.query;
const { pageSize, pageNumber, limit, cursor, name, category, ...otherFilters } = req.query;

const { filter, searchShared, searchSharedOnly } = buildPromptGroupFilter({
name,
@@ -171,13 +171,6 @@ router.get('/groups', async (req, res) => {
actualLimit = parseInt(pageSize, 10);
}

if (
actualCursor &&
(actualCursor === 'undefined' || actualCursor === 'null' || actualCursor.length === 0)
) {
actualCursor = null;
}

let accessibleIds = await findAccessibleResources({
userId,
role: req.user.role,
@@ -197,7 +190,6 @@ router.get('/groups', async (req, res) => {
publicPromptGroupIds: publiclyAccessibleIds,
});

// Cursor-based pagination only
const result = await getListPromptGroupsByAccess({
accessibleIds: filteredAccessibleIds,
otherParams: filter,
@@ -206,21 +198,19 @@ router.get('/groups', async (req, res) => {
});

if (!result) {
const emptyResponse = createEmptyPromptGroupsResponse({
pageNumber: '1',
pageSize: actualLimit,
actualLimit,
});
const emptyResponse = createEmptyPromptGroupsResponse({ pageNumber, pageSize, actualLimit });
return res.status(200).send(emptyResponse);
}

const { data: promptGroups = [], has_more = false, after = null } = result;

const groupsWithPublicFlag = markPublicPromptGroups(promptGroups, publiclyAccessibleIds);

const response = formatPromptGroupsResponse({
promptGroups: groupsWithPublicFlag,
pageNumber: '1', // Always 1 for cursor-based pagination
pageSize: actualLimit.toString(),
pageNumber,
pageSize,
actualLimit,
hasMore: has_more,
after,
});
@@ -434,7 +424,7 @@ router.get('/', async (req, res) => {
/**
* Deletes a prompt
*
* @param {ServerRequest} req - The request object.
* @param {Express.Request} req - The request object.
* @param {TDeletePromptVariables} req.params - The request parameters
* @param {import('mongoose').ObjectId} req.params.promptId - The prompt ID
* @param {Express.Response} res - The response object.

@@ -33,11 +33,22 @@ let promptRoutes;
let Prompt, PromptGroup, AclEntry, AccessRole, User;
let testUsers, testRoles;
let grantPermission;
let currentTestUser; // Track current user for middleware

// Helper function to set user in middleware
function setTestUser(app, user) {
currentTestUser = user;
app.use((req, res, next) => {
req.user = {
...(user.toObject ? user.toObject() : user),
id: user.id || user._id.toString(),
_id: user._id,
name: user.name,
role: user.role,
};
if (user.role === SystemRoles.ADMIN) {
console.log('Setting admin user with role:', req.user.role);
}
next();
});
}

beforeAll(async () => {
@@ -64,35 +75,14 @@ beforeAll(async () => {
app = express();
app.use(express.json());

// Add user middleware before routes
app.use((req, res, next) => {
if (currentTestUser) {
req.user = {
...(currentTestUser.toObject ? currentTestUser.toObject() : currentTestUser),
id: currentTestUser._id.toString(),
_id: currentTestUser._id,
name: currentTestUser.name,
role: currentTestUser.role,
};
}
next();
});
// Mock authentication middleware - default to owner
setTestUser(app, testUsers.owner);

// Set default user
currentTestUser = testUsers.owner;

// Import routes after middleware is set up
// Import routes after mocks are set up
promptRoutes = require('./prompts');
app.use('/api/prompts', promptRoutes);
});

afterEach(() => {
// Always reset to owner user after each test for isolation
if (currentTestUser !== testUsers.owner) {
currentTestUser = testUsers.owner;
}
});

afterAll(async () => {
await mongoose.disconnect();
await mongoServer.stop();
@@ -126,26 +116,36 @@ async function setupTestData() {
// Create test users
testUsers = {
owner: await User.create({
id: new ObjectId().toString(),
_id: new ObjectId(),
name: 'Prompt Owner',
email: 'owner@example.com',
role: SystemRoles.USER,
}),
viewer: await User.create({
id: new ObjectId().toString(),
_id: new ObjectId(),
name: 'Prompt Viewer',
email: 'viewer@example.com',
role: SystemRoles.USER,
}),
editor: await User.create({
id: new ObjectId().toString(),
_id: new ObjectId(),
name: 'Prompt Editor',
email: 'editor@example.com',
role: SystemRoles.USER,
}),
noAccess: await User.create({
id: new ObjectId().toString(),
_id: new ObjectId(),
name: 'No Access',
email: 'noaccess@example.com',
role: SystemRoles.USER,
}),
admin: await User.create({
id: new ObjectId().toString(),
_id: new ObjectId(),
name: 'Admin',
email: 'admin@example.com',
role: SystemRoles.ADMIN,
@@ -181,7 +181,8 @@ describe('Prompt Routes - ACL Permissions', () => {
it('should have routes loaded', async () => {
// This should at least not crash
const response = await request(app).get('/api/prompts/test-404');

console.log('Test 404 response status:', response.status);
console.log('Test 404 response body:', response.body);
// We expect a 401 or 404, not 500
expect(response.status).not.toBe(500);
});
@@ -206,6 +207,12 @@ describe('Prompt Routes - ACL Permissions', () => {

const response = await request(app).post('/api/prompts').send(promptData);

if (response.status !== 200) {
console.log('POST /api/prompts error status:', response.status);
console.log('POST /api/prompts error body:', response.body);
console.log('Console errors:', consoleErrorSpy.mock.calls);
}

expect(response.status).toBe(200);
expect(response.body.prompt).toBeDefined();
expect(response.body.prompt.prompt).toBe(promptData.prompt.prompt);
@@ -311,8 +318,29 @@ describe('Prompt Routes - ACL Permissions', () => {
});

it('should allow admin access without explicit permissions', async () => {
// Set admin user
setTestUser(app, testUsers.admin);
// First, reset the app to remove previous middleware
app = express();
app.use(express.json());

// Set admin user BEFORE adding routes
app.use((req, res, next) => {
req.user = {
...testUsers.admin.toObject(),
id: testUsers.admin._id.toString(),
_id: testUsers.admin._id,
name: testUsers.admin.name,
role: testUsers.admin.role,
};
next();
});

// Now add the routes
const promptRoutes = require('./prompts');
app.use('/api/prompts', promptRoutes);

console.log('Admin user:', testUsers.admin);
console.log('Admin role:', testUsers.admin.role);
console.log('SystemRoles.ADMIN:', SystemRoles.ADMIN);

const response = await request(app).get(`/api/prompts/${testPrompt._id}`).expect(200);

@@ -404,8 +432,21 @@ describe('Prompt Routes - ACL Permissions', () => {
grantedBy: testUsers.editor._id,
});

// Set viewer user
setTestUser(app, testUsers.viewer);
// Recreate app with viewer user
app = express();
app.use(express.json());
app.use((req, res, next) => {
req.user = {
...testUsers.viewer.toObject(),
id: testUsers.viewer._id.toString(),
_id: testUsers.viewer._id,
name: testUsers.viewer.name,
role: testUsers.viewer.role,
};
next();
});
const promptRoutes = require('./prompts');
app.use('/api/prompts', promptRoutes);

await request(app)
.delete(`/api/prompts/${authorPrompt._id}`)
@@ -458,8 +499,21 @@ describe('Prompt Routes - ACL Permissions', () => {
grantedBy: testUsers.owner._id,
});

// Ensure owner user
setTestUser(app, testUsers.owner);
// Recreate app to ensure fresh middleware
app = express();
app.use(express.json());
app.use((req, res, next) => {
req.user = {
...testUsers.owner.toObject(),
id: testUsers.owner._id.toString(),
_id: testUsers.owner._id,
name: testUsers.owner.name,
role: testUsers.owner.role,
};
next();
});
const promptRoutes = require('./prompts');
app.use('/api/prompts', promptRoutes);

const response = await request(app)
.patch(`/api/prompts/${testPrompt._id}/tags/production`)
@@ -483,8 +537,21 @@ describe('Prompt Routes - ACL Permissions', () => {
grantedBy: testUsers.owner._id,
});

// Set viewer user
setTestUser(app, testUsers.viewer);
// Recreate app with viewer user
app = express();
app.use(express.json());
app.use((req, res, next) => {
req.user = {
...testUsers.viewer.toObject(),
id: testUsers.viewer._id.toString(),
_id: testUsers.viewer._id,
name: testUsers.viewer.name,
role: testUsers.viewer.role,
};
next();
});
const promptRoutes = require('./prompts');
app.use('/api/prompts', promptRoutes);

await request(app).patch(`/api/prompts/${testPrompt._id}/tags/production`).expect(403);

@@ -543,305 +610,4 @@ describe('Prompt Routes - ACL Permissions', () => {
expect(response.body._id).toBe(publicPrompt._id.toString());
});
});

describe('Pagination', () => {
beforeEach(async () => {
// Create multiple prompt groups for pagination testing
const groups = [];
for (let i = 0; i < 15; i++) {
const group = await PromptGroup.create({
name: `Test Group ${i + 1}`,
category: 'pagination-test',
author: testUsers.owner._id,
authorName: testUsers.owner.name,
productionId: new ObjectId(),
updatedAt: new Date(Date.now() - i * 1000), // Stagger updatedAt for consistent ordering
});
groups.push(group);

// Grant owner permissions on each group
await grantPermission({
principalType: PrincipalType.USER,
principalId: testUsers.owner._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: group._id,
accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
grantedBy: testUsers.owner._id,
});
}
});

afterEach(async () => {
await PromptGroup.deleteMany({});
await AclEntry.deleteMany({});
});

it('should correctly indicate hasMore when there are more pages', async () => {
const response = await request(app)
.get('/api/prompts/groups')
.query({ limit: '10' })
.expect(200);

expect(response.body.promptGroups).toHaveLength(10);
expect(response.body.has_more).toBe(true);
expect(response.body.after).toBeTruthy();
// Since has_more is true, pages should be a high number (9999 in our fix)
expect(parseInt(response.body.pages)).toBeGreaterThan(1);
});

it('should correctly indicate no more pages on the last page', async () => {
// First get the cursor for page 2
const firstPage = await request(app)
.get('/api/prompts/groups')
.query({ limit: '10' })
.expect(200);

expect(firstPage.body.has_more).toBe(true);
expect(firstPage.body.after).toBeTruthy();

// Now fetch the second page using the cursor
const response = await request(app)
.get('/api/prompts/groups')
.query({ limit: '10', cursor: firstPage.body.after })
.expect(200);

expect(response.body.promptGroups).toHaveLength(5); // 15 total, 10 on page 1, 5 on page 2
expect(response.body.has_more).toBe(false);
});

it('should support cursor-based pagination', async () => {
// First page
const firstPage = await request(app)
.get('/api/prompts/groups')
.query({ limit: '5' })
.expect(200);

expect(firstPage.body.promptGroups).toHaveLength(5);
expect(firstPage.body.has_more).toBe(true);
expect(firstPage.body.after).toBeTruthy();

// Second page using cursor
const secondPage = await request(app)
.get('/api/prompts/groups')
.query({ limit: '5', cursor: firstPage.body.after })
.expect(200);

expect(secondPage.body.promptGroups).toHaveLength(5);
expect(secondPage.body.has_more).toBe(true);
expect(secondPage.body.after).toBeTruthy();

// Verify different groups
const firstPageIds = firstPage.body.promptGroups.map((g) => g._id);
const secondPageIds = secondPage.body.promptGroups.map((g) => g._id);
expect(firstPageIds).not.toEqual(secondPageIds);
});

it('should paginate correctly with category filtering', async () => {
// Create groups with different categories
await PromptGroup.deleteMany({}); // Clear existing groups
await AclEntry.deleteMany({});

// Create 8 groups with category 'test-cat-1'
for (let i = 0; i < 8; i++) {
const group = await PromptGroup.create({
name: `Category 1 Group ${i + 1}`,
category: 'test-cat-1',
author: testUsers.owner._id,
authorName: testUsers.owner.name,
productionId: new ObjectId(),
updatedAt: new Date(Date.now() - i * 1000),
});

await grantPermission({
principalType: PrincipalType.USER,
principalId: testUsers.owner._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: group._id,
accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
grantedBy: testUsers.owner._id,
});
}

// Create 7 groups with category 'test-cat-2'
for (let i = 0; i < 7; i++) {
const group = await PromptGroup.create({
name: `Category 2 Group ${i + 1}`,
category: 'test-cat-2',
author: testUsers.owner._id,
authorName: testUsers.owner.name,
productionId: new ObjectId(),
updatedAt: new Date(Date.now() - (i + 8) * 1000),
});

await grantPermission({
principalType: PrincipalType.USER,
principalId: testUsers.owner._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: group._id,
accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
grantedBy: testUsers.owner._id,
});
}

// Test pagination with category filter
const firstPage = await request(app)
.get('/api/prompts/groups')
.query({ limit: '5', category: 'test-cat-1' })
.expect(200);

expect(firstPage.body.promptGroups).toHaveLength(5);
expect(firstPage.body.promptGroups.every((g) => g.category === 'test-cat-1')).toBe(true);
expect(firstPage.body.has_more).toBe(true);
expect(firstPage.body.after).toBeTruthy();

const secondPage = await request(app)
.get('/api/prompts/groups')
.query({ limit: '5', cursor: firstPage.body.after, category: 'test-cat-1' })
.expect(200);

expect(secondPage.body.promptGroups).toHaveLength(3); // 8 total, 5 on page 1, 3 on page 2
expect(secondPage.body.promptGroups.every((g) => g.category === 'test-cat-1')).toBe(true);
expect(secondPage.body.has_more).toBe(false);
});

it('should paginate correctly with name/keyword filtering', async () => {
// Create groups with specific names
await PromptGroup.deleteMany({}); // Clear existing groups
await AclEntry.deleteMany({});

// Create 12 groups with 'Search' in the name
for (let i = 0; i < 12; i++) {
const group = await PromptGroup.create({
name: `Search Test Group ${i + 1}`,
category: 'search-test',
author: testUsers.owner._id,
authorName: testUsers.owner.name,
productionId: new ObjectId(),
updatedAt: new Date(Date.now() - i * 1000),
});

await grantPermission({
principalType: PrincipalType.USER,
principalId: testUsers.owner._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: group._id,
accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
grantedBy: testUsers.owner._id,
});
}

// Create 5 groups without 'Search' in the name
for (let i = 0; i < 5; i++) {
const group = await PromptGroup.create({
name: `Other Group ${i + 1}`,
category: 'other-test',
author: testUsers.owner._id,
authorName: testUsers.owner.name,
productionId: new ObjectId(),
updatedAt: new Date(Date.now() - (i + 12) * 1000),
});

await grantPermission({
principalType: PrincipalType.USER,
principalId: testUsers.owner._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: group._id,
accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
grantedBy: testUsers.owner._id,
});
}

// Test pagination with name filter
const firstPage = await request(app)
.get('/api/prompts/groups')
.query({ limit: '10', name: 'Search' })
.expect(200);

expect(firstPage.body.promptGroups).toHaveLength(10);
expect(firstPage.body.promptGroups.every((g) => g.name.includes('Search'))).toBe(true);
expect(firstPage.body.has_more).toBe(true);
expect(firstPage.body.after).toBeTruthy();

const secondPage = await request(app)
.get('/api/prompts/groups')
.query({ limit: '10', cursor: firstPage.body.after, name: 'Search' })
.expect(200);

expect(secondPage.body.promptGroups).toHaveLength(2); // 12 total, 10 on page 1, 2 on page 2
expect(secondPage.body.promptGroups.every((g) => g.name.includes('Search'))).toBe(true);
expect(secondPage.body.has_more).toBe(false);
});

it('should paginate correctly with combined filters', async () => {
// Create groups with various combinations
await PromptGroup.deleteMany({}); // Clear existing groups
await AclEntry.deleteMany({});

// Create 6 groups matching both category and name filters
for (let i = 0; i < 6; i++) {
const group = await PromptGroup.create({
name: `API Test Group ${i + 1}`,
category: 'api-category',
author: testUsers.owner._id,
authorName: testUsers.owner.name,
productionId: new ObjectId(),
updatedAt: new Date(Date.now() - i * 1000),
});

await grantPermission({
principalType: PrincipalType.USER,
principalId: testUsers.owner._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: group._id,
accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
grantedBy: testUsers.owner._id,
});
}

// Create groups that only match one filter
for (let i = 0; i < 4; i++) {
const group = await PromptGroup.create({
name: `API Other Group ${i + 1}`,
category: 'other-category',
author: testUsers.owner._id,
authorName: testUsers.owner.name,
productionId: new ObjectId(),
updatedAt: new Date(Date.now() - (i + 6) * 1000),
});

await grantPermission({
principalType: PrincipalType.USER,
principalId: testUsers.owner._id,
resourceType: ResourceType.PROMPTGROUP,
resourceId: group._id,
accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
grantedBy: testUsers.owner._id,
});
}

// Test pagination with both filters
const response = await request(app)
.get('/api/prompts/groups')
.query({ limit: '5', name: 'API', category: 'api-category' })
.expect(200);

expect(response.body.promptGroups).toHaveLength(5);
expect(
response.body.promptGroups.every(
(g) => g.name.includes('API') && g.category === 'api-category',
),
).toBe(true);
expect(response.body.has_more).toBe(true);
expect(response.body.after).toBeTruthy();

// Page 2
const page2 = await request(app)
.get('/api/prompts/groups')
.query({ limit: '5', cursor: response.body.after, name: 'API', category: 'api-category' })
.expect(200);

expect(page2.body.promptGroups).toHaveLength(1); // 6 total, 5 on page 1, 1 on page 2
expect(page2.body.has_more).toBe(false);
});
});
});

@@ -1,14 +1,14 @@
const express = require('express');
const { requireJwtAuth, canDeleteAccount, verifyEmailLimiter } = require('~/server/middleware');
const {
getUserController,
deleteUserController,
verifyEmailController,
updateUserPluginsController,
resendVerificationController,
getTermsStatusController,
acceptTermsController,
verifyEmailController,
deleteUserController,
getUserController,
} = require('~/server/controllers/UserController');
const { requireJwtAuth, canDeleteAccount, verifyEmailLimiter } = require('~/server/middleware');

const router = express.Router();

@@ -246,7 +246,7 @@ describe('AppService', () => {
});

it('should load and format tools accurately with defined structure', async () => {
const { loadAndFormatTools } = require('./start/tools');
const { loadAndFormatTools } = require('./ToolService');

const result = await AppService();

@@ -254,6 +254,8 @@ describe('AppService', () => {
adminFilter: undefined,
adminIncluded: undefined,
directory: expect.anything(),
imageOutputType: expect.any(String),
fileStrategy: expect.any(String),
});

// Verify tools are included in the returned config

@@ -16,6 +16,7 @@ const { retrieveAndProcessFile } = require('~/server/services/Files/process');
const { processRequiredActions } = require('~/server/services/ToolService');
const { RunManager, waitForRun } = require('~/server/services/Runs');
const { processMessages } = require('~/server/services/Threads');
const { getAppConfig } = require('~/server/services/Config');
const { createOnProgress } = require('~/server/utils');
const { TextStream } = require('~/app/clients');

@@ -350,7 +351,7 @@ async function runAssistant({
accumulatedMessages = [],
in_progress: inProgress,
}) {
const appConfig = openai.req.config;
const appConfig = await getAppConfig({ role: openai.req.user?.role });
let steps = accumulatedSteps;
let messages = accumulatedMessages;
const in_progress = inProgress ?? createInProgressHandler(openai, thread_id, messages);

@@ -78,7 +78,7 @@ const createTokenHash = () => {

/**
* Send Verification Email
* @param {Partial<IUser>} user
* @param {Partial<MongoUser> & { _id: ObjectId, email: string, name: string}} user
* @returns {Promise<void>}
*/
const sendVerificationEmail = async (user) => {
@@ -112,7 +112,7 @@ const sendVerificationEmail = async (user) => {

/**
* Verify Email
* @param {ServerRequest} req
* @param {Express.Request} req
*/
const verifyEmail = async (req) => {
const { email, token } = req.body;
@@ -160,9 +160,9 @@ const verifyEmail = async (req) => {

/**
* Register a new user.
* @param {IUser} user <email, password, name, username>
* @param {Partial<IUser>} [additionalData={}]
* @returns {Promise<{status: number, message: string, user?: IUser}>}
* @param {MongoUser} user <email, password, name, username>
* @param {Partial<MongoUser>} [additionalData={}]
* @returns {Promise<{status: number, message: string, user?: MongoUser}>}
*/
const registerUser = async (user, additionalData = {}) => {
const { error } = registerSchema.safeParse(user);
@@ -196,7 +196,7 @@ const registerUser = async (user, additionalData = {}) => {
}

const appConfig = await getAppConfig({ role: user.role });
if (!isEmailDomainAllowed(email, appConfig?.registration?.allowedDomains)) {
if (!(await isEmailDomainAllowed(email, appConfig?.registration?.allowedDomains))) {
const errorMessage =
'The email address provided cannot be used. Please use a different email address.';
logger.error(`[registerUser] [Registration not allowed] [Email: ${user.email}]`);
@@ -248,7 +248,7 @@ const registerUser = async (user, additionalData = {}) => {

/**
* Request password reset
* @param {ServerRequest} req
* @param {Express.Request} req
*/
const requestPasswordReset = async (req) => {
const { email } = req.body;
@@ -500,6 +500,18 @@ const resendVerificationEmail = async (req) => {
};
}
};
/**
* Generate a short-lived JWT token
* @param {String} userId - The ID of the user
* @param {String} [expireIn='5m'] - The expiration time for the token (default is 5 minutes)
* @returns {String} - The generated JWT token
*/
const generateShortLivedToken = (userId, expireIn = '5m') => {
return jwt.sign({ id: userId }, process.env.JWT_SECRET, {
expiresIn: expireIn,
algorithm: 'HS256',
});
};

module.exports = {
logoutUser,
@@ -510,4 +522,5 @@ module.exports = {
setOpenIDAuthTokens,
requestPasswordReset,
resendVerificationEmail,
generateShortLivedToken,
};

@@ -26,7 +26,7 @@ const ToolCacheKeys = {
* @param {string[]} [options.roleIds] - Role IDs for role-based tools
* @param {string[]} [options.groupIds] - Group IDs for group-based tools
* @param {boolean} [options.includeGlobal=true] - Whether to include global tools
* @returns {Promise<LCAvailableTools|null>} The available tools object or null if not cached
* @returns {Promise<Object|null>} The available tools object or null if not cached
*/
async function getCachedTools(options = {}) {
const cache = getLogStores(CacheKeys.CONFIG_STORE);
@@ -41,13 +41,13 @@ async function getCachedTools(options = {}) {
// Future implementation will merge tools from multiple sources
// based on user permissions, roles, and groups
if (userId) {
/** @type {LCAvailableTools | null} Check if we have pre-computed effective tools for this user */
// Check if we have pre-computed effective tools for this user
const effectiveTools = await cache.get(ToolCacheKeys.EFFECTIVE(userId));
if (effectiveTools) {
return effectiveTools;
}

/** @type {LCAvailableTools | null} Otherwise, compute from individual sources */
// Otherwise, compute from individual sources
const toolSources = [];

if (includeGlobal) {

@@ -22,7 +22,7 @@ async function getEndpointsConfig(req) {
return cachedEndpointsConfig;
}

const appConfig = req.config ?? (await getAppConfig({ role: req.user?.role }));
const appConfig = await getAppConfig({ role: req.user?.role });
const defaultEndpointsConfig = await loadDefaultEndpointsConfig(appConfig);
const customEndpointsConfig = loadCustomEndpointsConfig(appConfig?.endpoints?.custom);

@@ -1,11 +1,11 @@
const appConfig = require('./app');
const { config } = require('./EndpointService');
const getCachedTools = require('./getCachedTools');
const mcpToolsCache = require('./mcpToolsCache');
const loadCustomConfig = require('./loadCustomConfig');
const loadConfigModels = require('./loadConfigModels');
const loadDefaultModels = require('./loadDefaultModels');
const getEndpointsConfig = require('./getEndpointsConfig');
const loadOverrideConfig = require('./loadOverrideConfig');
const loadAsyncEndpoints = require('./loadAsyncEndpoints');

module.exports = {
@@ -13,9 +13,9 @@ module.exports = {
loadCustomConfig,
loadConfigModels,
loadDefaultModels,
loadOverrideConfig,
loadAsyncEndpoints,
...appConfig,
...getCachedTools,
...mcpToolsCache,
...getEndpointsConfig,
};

@@ -6,7 +6,7 @@ const { getAppConfig } = require('./app');
/**
* Load config endpoints from the cached configuration object
* @function loadConfigModels
* @param {ServerRequest} req - The Express request object.
* @param {Express.Request} req - The Express request object.
*/
async function loadConfigModels(req) {
const appConfig = await getAppConfig({ role: req.user?.role });
@@ -72,11 +72,10 @@ async function loadConfigModels(req) {
fetchPromisesMap[uniqueKey] =
fetchPromisesMap[uniqueKey] ||
fetchModels({
name,
apiKey: API_KEY,
baseURL: BASE_URL,
user: req.user.id,
direct: endpoint.directEndpoint,
baseURL: BASE_URL,
apiKey: API_KEY,
name,
userIdQuery: models.userIdQuery,
});
uniqueKeyToEndpointsMap[uniqueKey] = uniqueKeyToEndpointsMap[uniqueKey] || [];

@@ -11,7 +11,7 @@ const {
* Loads the default models for the application.
* @async
* @function
* @param {ServerRequest} req - The Express request object.
* @param {Express.Request} req - The Express request object.
*/
async function loadDefaultModels(req) {
try {

6
api/server/services/Config/loadOverrideConfig.js
Normal file
@@ -0,0 +1,6 @@
// fetch some remote config
async function loadOverrideConfig() {
return false;
}

module.exports = loadOverrideConfig;
@@ -1,143 +0,0 @@
const { logger } = require('@librechat/data-schemas');
const { CacheKeys, Constants } = require('librechat-data-provider');
const { getCachedTools, setCachedTools } = require('./getCachedTools');
const { getLogStores } = require('~/cache');

/**
* Updates MCP tools in the cache for a specific server and user
* @param {Object} params - Parameters for updating MCP tools
* @param {string} params.userId - User ID
* @param {string} params.serverName - MCP server name
* @param {Array} params.tools - Array of tool objects from MCP server
* @returns {Promise<LCAvailableTools>}
*/
async function updateMCPUserTools({ userId, serverName, tools }) {
try {
const userTools = await getCachedTools({ userId });

const mcpDelimiter = Constants.mcp_delimiter;
for (const key of Object.keys(userTools)) {
if (key.endsWith(`${mcpDelimiter}${serverName}`)) {
delete userTools[key];
}
}

for (const tool of tools) {
const name = `${tool.name}${Constants.mcp_delimiter}${serverName}`;
userTools[name] = {
type: 'function',
['function']: {
name,
description: tool.description,
parameters: tool.inputSchema,
},
};
}

await setCachedTools(userTools, { userId });

const cache = getLogStores(CacheKeys.CONFIG_STORE);
await cache.delete(CacheKeys.TOOLS);
logger.debug(`[MCP Cache] Updated ${tools.length} tools for ${serverName} user ${userId}`);
return userTools;
} catch (error) {
logger.error(`[MCP Cache] Failed to update tools for ${serverName}:`, error);
throw error;
}
}

/**
* Merges app-level tools with global tools
* @param {import('@librechat/api').LCAvailableTools} appTools
* @returns {Promise<void>}
*/
async function mergeAppTools(appTools) {
try {
const count = Object.keys(appTools).length;
if (!count) {
return;
}
const cachedTools = await getCachedTools({ includeGlobal: true });
const mergedTools = { ...cachedTools, ...appTools };
await setCachedTools(mergedTools, { isGlobal: true });
const cache = getLogStores(CacheKeys.CONFIG_STORE);
await cache.delete(CacheKeys.TOOLS);
logger.debug(`Merged ${count} app-level tools`);
} catch (error) {
logger.error('Failed to merge app-level tools:', error);
throw error;
}
}

/**
* Merges user-level tools with global tools
* @param {object} params
* @param {string} params.userId
* @param {Record<string, FunctionTool>} params.cachedUserTools
* @param {import('@librechat/api').LCAvailableTools} params.userTools
* @returns {Promise<void>}
*/
async function mergeUserTools({ userId, cachedUserTools, userTools }) {
try {
if (!userId) {
return;
}
const count = Object.keys(userTools).length;
if (!count) {
return;
}
const cachedTools = cachedUserTools ?? (await getCachedTools({ userId }));
const mergedTools = { ...cachedTools, ...userTools };
await setCachedTools(mergedTools, { userId });
const cache = getLogStores(CacheKeys.CONFIG_STORE);
await cache.delete(CacheKeys.TOOLS);
logger.debug(`Merged ${count} user-level tools`);
} catch (error) {
logger.error('Failed to merge user-level tools:', error);
throw error;
}
}

/**
* Clears all MCP tools for a specific server
* @param {Object} params - Parameters for clearing MCP tools
* @param {string} [params.userId] - User ID (if clearing user-specific tools)
* @param {string} params.serverName - MCP server name
* @returns {Promise<void>}
*/
async function clearMCPServerTools({ userId, serverName }) {
try {
const tools = await getCachedTools({ userId, includeGlobal: !userId });

// Remove all tools for this server
const mcpDelimiter = Constants.mcp_delimiter;
let removedCount = 0;
for (const key of Object.keys(tools)) {
if (key.endsWith(`${mcpDelimiter}${serverName}`)) {
delete tools[key];
removedCount++;
}
}

if (removedCount > 0) {
await setCachedTools(tools, userId ? { userId } : { isGlobal: true });

const cache = getLogStores(CacheKeys.CONFIG_STORE);
await cache.delete(CacheKeys.TOOLS);

logger.debug(
`[MCP Cache] Removed ${removedCount} tools for ${serverName}${userId ? ` user ${userId}` : ' (global)'}`,
);
}
} catch (error) {
logger.error(`[MCP Cache] Failed to clear tools for ${serverName}:`, error);
throw error;
}
}

module.exports = {
mergeAppTools,
mergeUserTools,
updateMCPUserTools,
clearMCPServerTools,
};
@@ -16,6 +16,7 @@ const generateArtifactsPrompt = require('~/app/clients/prompts/artifacts');
|
||||
const { getProviderConfig } = require('~/server/services/Endpoints');
|
||||
const { processFiles } = require('~/server/services/Files/process');
|
||||
const { getFiles, getToolFilesByIds } = require('~/models/File');
|
||||
const { getAppConfig } = require('~/server/services/Config');
|
||||
const { getConvoFiles } = require('~/models/Conversation');
|
||||
const { getModelMaxTokens } = require('~/utils');
|
||||
|
||||
@@ -30,13 +31,7 @@ const { getModelMaxTokens } = require('~/utils');
|
||||
* @param {TEndpointOption} [params.endpointOption]
|
||||
* @param {Set<string>} [params.allowedProviders]
|
||||
* @param {boolean} [params.isInitialAgent]
|
||||
* @returns {Promise<Agent & {
|
||||
* tools: StructuredTool[],
|
||||
* attachments: Array<MongoFile>,
|
||||
* toolContextMap: Record<string, unknown>,
|
||||
* maxContextTokens: number,
|
||||
* userMCPAuthMap?: Record<string, Record<string, string>>
|
||||
* }>}
|
||||
* @returns {Promise<Agent & { tools: StructuredTool[], attachments: Array<MongoFile>, toolContextMap: Record<string, unknown>, maxContextTokens: number }>}
|
||||
*/
|
||||
const initializeAgent = async ({
|
||||
req,
|
||||
@@ -49,7 +44,7 @@ const initializeAgent = async ({
|
||||
allowedProviders,
|
||||
isInitialAgent = false,
|
||||
}) => {
|
||||
const appConfig = req.config;
|
||||
const appConfig = await getAppConfig({ role: req.user?.role });
|
||||
if (
|
||||
isAgentsEndpoint(endpointOption?.endpoint) &&
|
||||
allowedProviders.size > 0 &&
|
||||
@@ -99,19 +94,16 @@ const initializeAgent = async ({
|
||||
});
|
||||
|
||||
const provider = agent.provider;
|
||||
const {
|
||||
tools: structuredTools,
|
||||
toolContextMap,
|
||||
userMCPAuthMap,
|
||||
} = (await loadTools?.({
|
||||
req,
|
||||
res,
|
||||
provider,
|
||||
agentId: agent.id,
|
||||
tools: agent.tools,
|
||||
model: agent.model,
|
||||
tool_resources,
|
||||
})) ?? {};
|
||||
const { tools: structuredTools, toolContextMap } =
|
||||
(await loadTools?.({
|
||||
req,
|
||||
res,
|
||||
provider,
|
||||
agentId: agent.id,
|
||||
tools: agent.tools,
|
||||
model: agent.model,
|
||||
tool_resources,
|
||||
})) ?? {};
|
||||
|
||||
agent.endpoint = provider;
|
||||
const { getOptions, overrideProvider } = getProviderConfig({ provider, appConfig });
|
||||
@@ -200,7 +192,6 @@ const initializeAgent = async ({
|
||||
tools,
|
||||
attachments,
|
||||
resendFiles,
|
||||
userMCPAuthMap,
|
||||
toolContextMap,
|
||||
useLegacyContent: !!options.useLegacyContent,
|
||||
maxContextTokens: Math.round((agentMaxContextTokens - maxTokens) * 0.9),
|
||||
|
||||
@@ -1,10 +1,6 @@
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { createContentAggregator } = require('@librechat/agents');
|
||||
const {
|
||||
validateAgentModel,
|
||||
getCustomEndpointConfig,
|
||||
createSequentialChainEdges,
|
||||
} = require('@librechat/api');
|
||||
const { validateAgentModel, getCustomEndpointConfig } = require('@librechat/api');
|
||||
const {
|
||||
Constants,
|
||||
EModelEndpoint,
|
||||
@@ -19,13 +15,11 @@ const { initializeAgent } = require('~/server/services/Endpoints/agents/agent');
|
||||
const { getModelsConfig } = require('~/server/controllers/ModelController');
|
||||
const { loadAgentTools } = require('~/server/services/ToolService');
|
||||
const AgentClient = require('~/server/controllers/agents/client');
|
||||
const { getAppConfig } = require('~/server/services/Config');
|
||||
const { getAgent } = require('~/models/Agent');
|
||||
const { logViolation } = require('~/cache');
|
||||
|
||||
/**
|
||||
* @param {AbortSignal} signal
|
||||
*/
|
||||
function createToolLoader(signal) {
|
||||
function createToolLoader() {
|
||||
/**
|
||||
* @param {object} params
|
||||
* @param {ServerRequest} params.req
|
||||
@@ -35,11 +29,7 @@ function createToolLoader(signal) {
|
||||
* @param {string} params.provider
|
||||
* @param {string} params.model
|
||||
* @param {AgentToolResources} params.tool_resources
|
||||
* @returns {Promise<{
|
||||
* tools: StructuredTool[],
|
||||
* toolContextMap: Record<string, unknown>,
|
||||
* userMCPAuthMap?: Record<string, Record<string, string>>
|
||||
* } | undefined>}
|
||||
* @returns {Promise<{ tools: StructuredTool[], toolContextMap: Record<string, unknown> } | undefined>}
|
||||
*/
|
||||
return async function loadTools({ req, res, agentId, tools, provider, model, tool_resources }) {
|
||||
const agent = { id: agentId, tools, provider, model };
|
||||
@@ -48,7 +38,6 @@ function createToolLoader(signal) {
|
||||
req,
|
||||
res,
|
||||
agent,
|
||||
signal,
|
||||
tool_resources,
|
||||
});
|
||||
} catch (error) {
|
||||
@@ -57,11 +46,11 @@ function createToolLoader(signal) {
};
}

const initializeClient = async ({ req, res, signal, endpointOption }) => {
const initializeClient = async ({ req, res, endpointOption }) => {
if (!endpointOption) {
throw new Error('Endpoint option not provided');
}
const appConfig = req.config;
const appConfig = await getAppConfig({ role: req.user?.role });

// TODO: use endpointOption to determine options/modelOptions
/** @type {Array<UsageMetadata>} */
@@ -103,7 +92,7 @@ const initializeClient = async ({ req, res, signal, endpointOption }) => {
const agentConfigs = new Map();
const allowedProviders = new Set(appConfig?.endpoints?.[EModelEndpoint.agents]?.allowedProviders);

const loadTools = createToolLoader(signal);
const loadTools = createToolLoader();
/** @type {Array<MongoFile>} */
const requestFiles = req.body.files ?? [];
/** @type {string} */
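The `allowedProviders` Set built here feeds the provider gate seen in the first hunk of this diff (`allowedProviders.size > 0 && ...`). A small illustrative sketch of that kind of allow-list check, with hypothetical names:

```js
// Illustrative sketch: gating an agent's provider against an allow-list built from
// config, mirroring the `allowedProviders` Set above. Names are made up for the example.
function assertProviderAllowed(provider, allowedProviders) {
  // An empty Set means "no restriction configured".
  if (allowedProviders.size > 0 && !allowedProviders.has(provider)) {
    throw new Error(`Provider "${provider}" is not in the allowed providers list`);
  }
}

const allowed = new Set(['openAI', 'anthropic']);
assertProviderAllowed('anthropic', allowed); // passes
assertProviderAllowed('anthropic', new Set()); // passes: no restriction configured
// assertProviderAllowed('bedrock', allowed); // would throw
```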
@@ -122,91 +111,39 @@ const initializeClient = async ({ req, res, signal, endpointOption }) => {
});

const agent_ids = primaryConfig.agent_ids;
let userMCPAuthMap = primaryConfig.userMCPAuthMap;

async function processAgent(agentId) {
const agent = await getAgent({ id: agentId });
if (!agent) {
throw new Error(`Agent ${agentId} not found`);
}

const validationResult = await validateAgentModel({
req,
res,
agent,
modelsConfig,
logViolation,
});

if (!validationResult.isValid) {
throw new Error(validationResult.error?.message);
}

const config = await initializeAgent({
req,
res,
agent,
loadTools,
requestFiles,
conversationId,
endpointOption,
allowedProviders,
});
if (userMCPAuthMap != null) {
Object.assign(userMCPAuthMap, config.userMCPAuthMap ?? {});
} else {
userMCPAuthMap = config.userMCPAuthMap;
}
agentConfigs.set(agentId, config);
}

let edges = primaryConfig.edges;
const checkAgentInit = (agentId) => agentId === primaryConfig.id || agentConfigs.has(agentId);
if ((edges?.length ?? 0) > 0) {
for (const edge of edges) {
if (Array.isArray(edge.to)) {
for (const to of edge.to) {
if (checkAgentInit(to)) {
continue;
}
await processAgent(to);
}
} else if (typeof edge.to === 'string' && checkAgentInit(edge.to)) {
continue;
} else if (typeof edge.to === 'string') {
await processAgent(edge.to);
}

if (Array.isArray(edge.from)) {
for (const from of edge.from) {
if (checkAgentInit(from)) {
continue;
}
await processAgent(from);
}
} else if (typeof edge.from === 'string' && checkAgentInit(edge.from)) {
continue;
} else if (typeof edge.from === 'string') {
await processAgent(edge.from);
}
}
}

/** @deprecated Agent Chain */
if (agent_ids?.length) {
for (const agentId of agent_ids) {
if (checkAgentInit(agentId)) {
continue;
const agent = await getAgent({ id: agentId });
if (!agent) {
throw new Error(`Agent ${agentId} not found`);
}
await processAgent(agentId);

const validationResult = await validateAgentModel({
req,
res,
agent,
modelsConfig,
logViolation,
});

if (!validationResult.isValid) {
throw new Error(validationResult.error?.message);
}

const config = await initializeAgent({
req,
res,
agent,
loadTools,
requestFiles,
conversationId,
endpointOption,
allowedProviders,
});
agentConfigs.set(agentId, config);
}

const chain = await createSequentialChainEdges([primaryConfig.id].concat(agent_ids), '{convo}');
edges = edges ? edges.concat(chain) : chain;
}

primaryConfig.edges = edges;

let endpointConfig = appConfig.endpoints?.[primaryConfig.endpoint];
if (!isAgentsEndpoint(primaryConfig.endpoint) && !endpointConfig) {
try {
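The edge walk in the hunk above initializes every agent referenced by an edge exactly once, skipping the primary agent and anything already configured. A simplified, hypothetical sketch of the same traversal idea:

```js
// Simplified, hypothetical sketch of the edge walk above: initialize every agent
// referenced by an edge exactly once, whether `to`/`from` is a string or an array.
async function initEdgeAgents(edges, primaryId, initAgent) {
  const initialized = new Map();
  const isInitialized = (id) => id === primaryId || initialized.has(id);

  const visit = async (ref) => {
    const ids = Array.isArray(ref) ? ref : typeof ref === 'string' ? [ref] : [];
    for (const id of ids) {
      if (isInitialized(id)) {
        continue;
      }
      initialized.set(id, await initAgent(id));
    }
  };

  for (const edge of edges ?? []) {
    await visit(edge.to);
    await visit(edge.from);
  }
  return initialized;
}

// Usage with a stub initializer:
initEdgeAgents(
  [{ from: 'agent_a', to: ['agent_b', 'agent_c'] }],
  'agent_a',
  async (id) => ({ id }),
).then((configs) => console.log([...configs.keys()])); // ['agent_b', 'agent_c']
```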
@@ -254,7 +191,7 @@ const initializeClient = async ({ req, res, signal, endpointOption }) => {
: EModelEndpoint.agents,
});

return { client, userMCPAuthMap };
return { client };
};

module.exports = { initializeClient };

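One side of this hunk returns `userMCPAuthMap` alongside the client; that map is accumulated across agents in `processAgent` via `Object.assign`. A minimal sketch of that merge pattern, with a hypothetical helper:

```js
// Hypothetical sketch of the accumulation pattern used in processAgent above:
// merge each agent's MCP auth map into a single map returned alongside the client.
function mergeAuthMaps(current, incoming) {
  if (current != null) {
    Object.assign(current, incoming ?? {});
    return current;
  }
  return incoming;
}

let userMCPAuthMap;
userMCPAuthMap = mergeAuthMaps(userMCPAuthMap, { serverA: { token: 'abc' } });
userMCPAuthMap = mergeAuthMaps(userMCPAuthMap, { serverB: { token: 'def' } });
console.log(Object.keys(userMCPAuthMap)); // ['serverA', 'serverB']
```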
@@ -1,8 +1,8 @@
const { isEnabled } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { CacheKeys } = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores');
const { isEnabled } = require('~/server/utils');
const { saveConvo } = require('~/models');
const { logger } = require('~/config');

/**
* Add title to conversation in a way that avoids memory retention

@@ -6,7 +6,6 @@ const buildOptions = (endpoint, parsedBody) => {
modelLabel,
promptPrefix,
maxContextTokens,
fileTokenLimit,
resendFiles = anthropicSettings.resendFiles.default,
promptCache = anthropicSettings.promptCache.default,
thinking = anthropicSettings.thinking.default,
@@ -30,7 +29,6 @@ const buildOptions = (endpoint, parsedBody) => {
greeting,
spec,
maxContextTokens,
fileTokenLimit,
modelOptions,
});

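Both `buildOptions` hunks in this diff add or remove fields that are destructured off the parsed request body and passed through to the endpoint option. An illustrative, self-contained sketch of that pattern; the names and defaults here are invented for the example and are not the project's `buildOptions`:

```js
// Illustrative sketch of the buildOptions pattern: pull known fields (with defaults)
// off the parsed request body and return a flat options object.
const exampleDefaults = { resendFiles: true, promptCache: true };

function buildOptionsSketch(endpoint, parsedBody) {
  const {
    modelLabel,
    promptPrefix,
    maxContextTokens,
    fileTokenLimit,
    resendFiles = exampleDefaults.resendFiles,
    promptCache = exampleDefaults.promptCache,
    ...modelOptions
  } = parsedBody;

  return {
    endpoint,
    modelLabel,
    promptPrefix,
    maxContextTokens,
    fileTokenLimit,
    resendFiles,
    promptCache,
    modelOptions,
  };
}

console.log(buildOptionsSketch('anthropic', { modelLabel: 'Claude', temperature: 0.7 }));
```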
@@ -2,9 +2,10 @@ const { EModelEndpoint } = require('librechat-data-provider');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
const { getLLMConfig } = require('~/server/services/Endpoints/anthropic/llm');
const AnthropicClient = require('~/app/clients/AnthropicClient');
const { getAppConfig } = require('~/server/services/Config');

const initializeClient = async ({ req, res, endpointOption, overrideModel, optionsOnly }) => {
const appConfig = req.config;
const appConfig = await getAppConfig({ role: req.user?.role });
const { ANTHROPIC_API_KEY, ANTHROPIC_REVERSE_PROXY, PROXY } = process.env;
const expiresAt = req.body.key;
const isUserProvided = ANTHROPIC_API_KEY === 'user_provided';
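This hunk switches between reading a config object already attached as `req.config` and resolving it on demand with `getAppConfig({ role: req.user?.role })`. A hypothetical sketch contrasting the two styles; `fetchAppConfig` is a stand-in stub, not the project's real config service:

```js
// Hypothetical sketch: a request handler can either use a config attached by
// middleware (req.config) or resolve one per role. `fetchAppConfig` is a stub.
async function fetchAppConfig({ role }) {
  return { endpoints: {}, resolvedForRole: role ?? 'default' };
}

async function resolveAppConfig(req) {
  // Prefer a config already attached to the request; otherwise resolve it per role.
  return req.config ?? (await fetchAppConfig({ role: req.user?.role }));
}

resolveAppConfig({ user: { role: 'ADMIN' } }).then((cfg) => console.log(cfg.resolvedForRole)); // 'ADMIN'
```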
@@ -27,21 +28,20 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio
const anthropicConfig = appConfig.endpoints?.[EModelEndpoint.anthropic];

if (anthropicConfig) {
clientOptions._lc_stream_delay = anthropicConfig.streamRate;
clientOptions.streamRate = anthropicConfig.streamRate;
clientOptions.titleModel = anthropicConfig.titleModel;
}

const allConfig = appConfig.endpoints?.all;
if (allConfig) {
clientOptions._lc_stream_delay = allConfig.streamRate;
clientOptions.streamRate = allConfig.streamRate;
}

if (optionsOnly) {
clientOptions = Object.assign(
{
proxy: PROXY ?? null,
userId: req.user.id,
reverseProxyUrl: ANTHROPIC_REVERSE_PROXY ?? null,
proxy: PROXY ?? null,
modelOptions: endpointOption?.model_parameters ?? {},
},
clientOptions,
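The stream-rate assignments above apply the endpoint-specific value first and then let the `all` endpoints block override it. A small illustrative sketch of that precedence, with stand-in names:

```js
// Illustrative sketch (stand-in names) of the precedence in this hunk: an
// endpoint-specific streamRate is applied first, then the `all` block, if present,
// overrides it.
function applyStreamRate(clientOptions, endpoints, endpointKey) {
  const endpointConfig = endpoints?.[endpointKey];
  if (endpointConfig) {
    clientOptions.streamRate = endpointConfig.streamRate;
    clientOptions.titleModel = endpointConfig.titleModel;
  }
  const allConfig = endpoints?.all;
  if (allConfig) {
    clientOptions.streamRate = allConfig.streamRate;
  }
  return clientOptions;
}

const opts = applyStreamRate({}, { anthropic: { streamRate: 25 }, all: { streamRate: 10 } }, 'anthropic');
console.log(opts.streamRate); // 10 — the `all` block wins
```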
@@ -15,7 +15,6 @@ const { checkPromptCacheSupport, getClaudeHeaders, configureReasoning } = requir
* @param {number} [options.modelOptions.topK] - Controls the number of top tokens to consider.
* @param {string[]} [options.modelOptions.stop] - Sequences where the API will stop generating further tokens.
* @param {boolean} [options.modelOptions.stream] - Whether to stream the response.
* @param {string} options.userId - The user ID for tracking and personalization.
* @param {string} [options.proxy] - Proxy server URL.
* @param {string} [options.reverseProxyUrl] - URL for a reverse proxy, if used.
*
@@ -48,11 +47,6 @@ function getLLMConfig(apiKey, options = {}) {
maxTokens:
mergedOptions.maxOutputTokens || anthropicSettings.maxOutputTokens.reset(mergedOptions.model),
clientOptions: {},
invocationKwargs: {
metadata: {
user_id: options.userId,
},
},
};

requestOptions = configureReasoning(requestOptions, systemOptions);
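One side of this hunk attaches per-user metadata to the request options via `invocationKwargs`. A hypothetical sketch of that object shape; the builder name and fields are invented for the example:

```js
// Hypothetical sketch of the shape being changed in this hunk: a request-options
// object that optionally tags calls with the requesting user via invocation kwargs.
function buildRequestOptionsSketch({ apiKey, model, maxTokens, userId }) {
  const requestOptions = {
    apiKey,
    model,
    maxTokens,
    clientOptions: {},
  };
  if (userId != null) {
    // Metadata of this shape is what the lines above attach for per-user tracking.
    requestOptions.invocationKwargs = { metadata: { user_id: userId } };
  }
  return requestOptions;
}

console.log(buildRequestOptionsSketch({ apiKey: 'sk-test', model: 'claude-3', maxTokens: 1024, userId: 'user123' }));
```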
@@ -7,6 +7,7 @@ const {
getUserKeyValues,
getUserKeyExpiry,
} = require('~/server/services/UserService');
const { getAppConfig } = require('~/server/services/Config');
const OAIClient = require('~/app/clients/OpenAIClient');

class Files {
@@ -48,7 +49,7 @@ class Files {
}

const initializeClient = async ({ req, res, version, endpointOption, initAppClient = false }) => {
const appConfig = req.config;
const appConfig = await getAppConfig({ role: req.user?.role });
const { PROXY, OPENAI_ORGANIZATION, AZURE_ASSISTANTS_API_KEY, AZURE_ASSISTANTS_BASE_URL } =
process.env;

@@ -1,6 +1,6 @@
// const OpenAI = require('openai');
const { ProxyAgent } = require('undici');
const { ErrorTypes, EModelEndpoint } = require('librechat-data-provider');
const { ErrorTypes } = require('librechat-data-provider');
const { getUserKey, getUserKeyExpiry, getUserKeyValues } = require('~/server/services/UserService');
const initializeClient = require('./initialize');
// const { OpenAIClient } = require('~/app');
@@ -12,7 +12,14 @@ jest.mock('~/server/services/UserService', () => ({
checkUserKeyExpiry: jest.requireActual('~/server/services/UserService').checkUserKeyExpiry,
}));

// Config is now passed via req.config, not getAppConfig
jest.mock('~/server/services/Config', () => ({
getAppConfig: jest.fn().mockResolvedValue({
azureAssistants: {
apiKey: 'test-key',
baseURL: 'https://test.url',
},
}),
}));

const today = new Date();
const tenDaysFromToday = new Date(today.setDate(today.getDate() + 10));
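The spec mocks the config service so `initializeClient` sees a fixed Azure Assistants configuration. A minimal, self-contained sketch of that Jest mocking style; `./configService` is a virtual stand-in module, not the project's real path, and this only runs under Jest:

```js
// Minimal sketch of the mocking style in this hunk: replace a config module with a
// resolved stub. The module path is virtual, so Jest does not need it to exist on disk.
jest.mock(
  './configService',
  () => ({
    getAppConfig: jest.fn().mockResolvedValue({
      azureAssistants: { apiKey: 'test-key', baseURL: 'https://test.url' },
    }),
  }),
  { virtual: true },
);

const { getAppConfig } = require('./configService');

test('returns the mocked azureAssistants config', async () => {
  const config = await getAppConfig({ role: 'USER' });
  expect(config.azureAssistants.apiKey).toBe('test-key');
  expect(getAppConfig).toHaveBeenCalledWith({ role: 'USER' });
});
```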
@@ -43,11 +50,7 @@ describe('initializeClient', () => {
isUserProvided: jest.fn().mockReturnValueOnce(false),
}));

const req = {
user: { id: 'user123' },
app,
config: { endpoints: { [EModelEndpoint.azureOpenAI]: {} } },
};
const req = { user: { id: 'user123' }, app };
const res = {};

const { openai, openAIApiKey } = await initializeClient({ req, res });
@@ -63,11 +66,7 @@ describe('initializeClient', () => {
getUserKeyValues.mockResolvedValue({ apiKey: 'user-api-key', baseURL: 'https://user.api.url' });
getUserKeyExpiry.mockResolvedValue(isoString);

const req = {
user: { id: 'user123' },
app,
config: { endpoints: { [EModelEndpoint.azureOpenAI]: {} } },
};
const req = { user: { id: 'user123' }, app };
const res = {};

const { openai, openAIApiKey } = await initializeClient({ req, res });
@@ -84,7 +83,7 @@ describe('initializeClient', () => {
let userValues = getUserKey();
try {
userValues = JSON.parse(userValues);
} catch {
} catch (e) {
throw new Error(
JSON.stringify({
type: ErrorTypes.INVALID_USER_KEY,
@@ -94,10 +93,7 @@ describe('initializeClient', () => {
return userValues;
});

const req = {
user: { id: 'user123' },
config: { endpoints: { [EModelEndpoint.azureOpenAI]: {} } },
};
const req = { user: { id: 'user123' } };
const res = {};

await expect(initializeClient({ req, res })).rejects.toThrow(/invalid_user_key/);
@@ -106,11 +102,7 @@ describe('initializeClient', () => {
test('throws error if API key is not provided', async () => {
delete process.env.AZURE_ASSISTANTS_API_KEY; // Simulate missing API key

const req = {
user: { id: 'user123' },
app,
config: { endpoints: { [EModelEndpoint.azureOpenAI]: {} } },
};
const req = { user: { id: 'user123' }, app };
const res = {};

await expect(initializeClient({ req, res })).rejects.toThrow(/Assistants API key not/);
@@ -120,11 +112,7 @@ describe('initializeClient', () => {
process.env.AZURE_ASSISTANTS_API_KEY = 'test-key';
process.env.PROXY = 'http://proxy.server';

const req = {
user: { id: 'user123' },
app,
config: { endpoints: { [EModelEndpoint.azureOpenAI]: {} } },
};
const req = { user: { id: 'user123' }, app };
const res = {};

const { openai } = await initializeClient({ req, res });

@@ -6,7 +6,6 @@ const buildOptions = (endpoint, parsedBody) => {
modelLabel: name,
promptPrefix,
maxContextTokens,
fileTokenLimit,
resendFiles = true,
imageDetail,
iconURL,
@@ -25,7 +24,6 @@ const buildOptions = (endpoint, parsedBody) => {
spec,
promptPrefix,
maxContextTokens,
fileTokenLimit,
model_parameters,
});

Some files were not shown because too many files have changed in this diff.