Compare commits

...

7 Commits

Author SHA1 Message Date
Danny Avila
7251308244 mcp example, mock i/o for client-to-server communications 2025-06-26 13:33:59 -04:00
Danny Avila
799f0e5810 🐛 fix: Move MemoryEntry and PluginAuth model retrieval inside methods for Runtime Usage 2025-06-25 20:58:34 -04:00
Danny Avila
cbda3cb529 🕐 feat: Configurable Retention Period for Temporary Chats (#8056)
* feat: Add configurable retention period for temporary chats

* Addressing eslint errors

* Fix: failing test due to missing registration

* Update: variable name and use hours instead of days for chat retention

* Addressing comments

* chore: fix import order in Conversation.js

* chore: import order in Message.js

* chore: fix import order in config.ts

* chore: move common methods to packages/api to reduce potential for circular dependencies

* refactor: update temp chat retention config type to Partial<TCustomConfig>

* refactor: remove unused config variable from AppService and update loadCustomConfig tests with logger mock

* refactor: handle model undefined edge case by moving Session model initialization inside methods

---------

Co-authored-by: Rakshit Tiwari <rak1729e@gmail.com>
2025-06-25 17:16:26 -04:00
Karol Potocki
3ab1bd65e5 🐛 fix: Support Bedrock Provider for MCP Image Content Rendering (#8047) 2025-06-25 15:38:24 -04:00
Marlon
c551ba21f5 📜 chore: Update .env.example (#8043)
Update recent Gemini model names and remove deprecated Gemini models from env.example
2025-06-25 15:31:24 -04:00
Danny Avila
c87422a1e0 🧠 feat: Thinking Budget, Include Thoughts, and Dynamic Thinking for Gemini 2.5 (#8055)
* feat: support thinking budget parameter for Gemini 2.5 series (#6949, #7542)

https://ai.google.dev/gemini-api/docs/thinking#set-budget

* refactor: update thinking budget minimum value to -1 for dynamic thinking

- see: https://ai.google.dev/gemini-api/docs/thinking#set-budget

* chore: bump @librechat/agents to v2.4.43

* refactor: rename LLMConfigOptions to OpenAIConfigOptions for clarity and consistency

- Updated type definitions and references in initialize.ts, llm.ts, and openai.ts to reflect the new naming convention.
- Ensured that the OpenAI configuration options are consistently used across the relevant files.

* refactor: port Google LLM methods to TypeScript Package

* chore: update @librechat/agents version to 2.4.43 in package-lock.json and package.json

* refactor: update thinking budget description for clarity and adjust placeholder in parameter settings

* refactor: enhance googleSettings default value for thinking budget to support dynamic adjustment

* chore: update @librechat/agents to v2.4.44 for Vertex Dynamic Thinking workaround

* refactor: rename google config function, update `createRun` types, use `reasoning` as `reasoningKey` for Google

* refactor: simplify placeholder handling in DynamicInput component

* refactor: enhance thinking budget description for clarity and allow automatic decision by setting to "-1"

* refactor: update text styling in OptionHover component for improved readability

* chore: update @librechat/agents dependency to v2.4.46 in package.json and package-lock.json

* chore: update @librechat/api version to 1.2.5 in package.json and package-lock.json

* refactor: enhance `clientOptions` handling by filtering `omitTitleOptions`, add `json` field for Google models

---------

Co-authored-by: ciffelia <15273128+ciffelia@users.noreply.github.com>
2025-06-25 15:14:33 -04:00
Dustin Healy
b169306096 🧪 ci: Add Tests for Custom Endpoint Header Resolution (#8045)
* Enhanced existing tests for the `resolveHeaders` function to cover all user field placeholders and messy scenarios.
* Added basic integration tests for custom endpoints initialization file
2025-06-24 21:11:06 -04:00
69 changed files with 1146 additions and 273 deletions

View File

@@ -142,10 +142,10 @@ GOOGLE_KEY=user_provided
# GOOGLE_AUTH_HEADER=true
# Gemini API (AI Studio)
# GOOGLE_MODELS=gemini-2.5-pro-preview-05-06,gemini-2.5-flash-preview-04-17,gemini-2.0-flash-001,gemini-2.0-flash-exp,gemini-2.0-flash-lite-001,gemini-1.5-pro-002,gemini-1.5-flash-002
# GOOGLE_MODELS=gemini-2.5-pro,gemini-2.5-flash,gemini-2.5-flash-lite-preview-06-17,gemini-2.0-flash,gemini-2.0-flash-lite
# Vertex AI
# GOOGLE_MODELS=gemini-2.5-pro-preview-05-06,gemini-2.5-flash-preview-04-17,gemini-2.0-flash-001,gemini-2.0-flash-exp,gemini-2.0-flash-lite-001,gemini-1.5-pro-002,gemini-1.5-flash-002
# GOOGLE_MODELS=gemini-2.5-pro,gemini-2.5-flash,gemini-2.5-flash-lite-preview-06-17,gemini-2.0-flash-001,gemini-2.0-flash-lite-001
# GOOGLE_TITLE_MODEL=gemini-2.0-flash-lite-001
@@ -657,4 +657,4 @@ OPENWEATHER_API_KEY=
# Reranker (Required)
# JINA_API_KEY=your_jina_api_key
# or
# COHERE_API_KEY=your_cohere_api_key
# COHERE_API_KEY=your_cohere_api_key

View File

@@ -1,7 +1,7 @@
const { google } = require('googleapis');
const { Tokenizer } = require('@librechat/api');
const { concat } = require('@langchain/core/utils/stream');
const { ChatVertexAI } = require('@langchain/google-vertexai');
const { Tokenizer, getSafetySettings } = require('@librechat/api');
const { ChatGoogleGenerativeAI } = require('@langchain/google-genai');
const { GoogleGenerativeAI: GenAI } = require('@google/generative-ai');
const { HumanMessage, SystemMessage } = require('@langchain/core/messages');
@@ -12,13 +12,13 @@ const {
endpointSettings,
parseTextParts,
EModelEndpoint,
googleSettings,
ContentTypes,
VisionModes,
ErrorTypes,
Constants,
AuthKeys,
} = require('librechat-data-provider');
const { getSafetySettings } = require('~/server/services/Endpoints/google/llm');
const { encodeAndFormat } = require('~/server/services/Files/images');
const { spendTokens } = require('~/models/spendTokens');
const { getModelMaxTokens } = require('~/utils');
@@ -166,6 +166,16 @@ class GoogleClient extends BaseClient {
);
}
// Add thinking configuration
this.modelOptions.thinkingConfig = {
thinkingBudget:
(this.modelOptions.thinking ?? googleSettings.thinking.default)
? this.modelOptions.thinkingBudget
: 0,
};
delete this.modelOptions.thinking;
delete this.modelOptions.thinkingBudget;
this.sender =
this.options.sender ??
getResponseSender({

View File

@@ -1,7 +1,8 @@
const { logger } = require('@librechat/data-schemas');
const { isEnabled, math } = require('@librechat/api');
const { ViolationTypes } = require('librechat-data-provider');
const { isEnabled, math, removePorts } = require('~/server/utils');
const { deleteAllUserSessions } = require('~/models');
const { removePorts } = require('~/server/utils');
const getLogStores = require('./getLogStores');
const { BAN_VIOLATIONS, BAN_INTERVAL } = process.env ?? {};

View File

@@ -1,7 +1,7 @@
const { Keyv } = require('keyv');
const { isEnabled, math } = require('@librechat/api');
const { CacheKeys, ViolationTypes, Time } = require('librechat-data-provider');
const { logFile, violationFile } = require('./keyvFiles');
const { isEnabled, math } = require('~/server/utils');
const keyvRedis = require('./keyvRedis');
const keyvMongo = require('./keyvMongo');

View File

@@ -1,4 +1,6 @@
const { logger } = require('@librechat/data-schemas');
const { createTempChatExpirationDate } = require('@librechat/api');
const getCustomConfig = require('~/server/services/Config/loadCustomConfig');
const { getMessages, deleteMessages } = require('./Message');
const { Conversation } = require('~/db/models');
@@ -98,10 +100,15 @@ module.exports = {
update.conversationId = newConversationId;
}
if (req.body.isTemporary) {
const expiredAt = new Date();
expiredAt.setDate(expiredAt.getDate() + 30);
update.expiredAt = expiredAt;
if (req?.body?.isTemporary) {
try {
const customConfig = await getCustomConfig();
update.expiredAt = createTempChatExpirationDate(customConfig);
} catch (err) {
logger.error('Error creating temporary chat expiration date:', err);
logger.info(`---\`saveConvo\` context: ${metadata?.context}`);
update.expiredAt = null;
}
} else {
update.expiredAt = null;
}

View File

@@ -1,5 +1,7 @@
const { z } = require('zod');
const { logger } = require('@librechat/data-schemas');
const { createTempChatExpirationDate } = require('@librechat/api');
const getCustomConfig = require('~/server/services/Config/loadCustomConfig');
const { Message } = require('~/db/models');
const idSchema = z.string().uuid();
@@ -54,9 +56,14 @@ async function saveMessage(req, params, metadata) {
};
if (req?.body?.isTemporary) {
const expiredAt = new Date();
expiredAt.setDate(expiredAt.getDate() + 30);
update.expiredAt = expiredAt;
try {
const customConfig = await getCustomConfig();
update.expiredAt = createTempChatExpirationDate(customConfig);
} catch (err) {
logger.error('Error creating temporary chat expiration date:', err);
logger.info(`---\`saveMessage\` context: ${metadata?.context}`);
update.expiredAt = null;
}
} else {
update.expiredAt = null;
}

View File

@@ -48,7 +48,7 @@
"@langchain/google-genai": "^0.2.13",
"@langchain/google-vertexai": "^0.2.13",
"@langchain/textsplitters": "^0.1.0",
"@librechat/agents": "^2.4.42",
"@librechat/agents": "^2.4.46",
"@librechat/api": "*",
"@librechat/data-schemas": "*",
"@node-saml/passport-saml": "^5.0.0",

View File

@@ -1,17 +1,17 @@
const cookies = require('cookie');
const jwt = require('jsonwebtoken');
const openIdClient = require('openid-client');
const { isEnabled } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const {
registerUser,
resetPassword,
setAuthTokens,
requestPasswordReset,
setOpenIDAuthTokens,
resetPassword,
setAuthTokens,
registerUser,
} = require('~/server/services/AuthService');
const { findUser, getUserById, deleteAllUserSessions, findSession } = require('~/models');
const { getOpenIdConfig } = require('~/strategies');
const { isEnabled } = require('~/server/utils');
const registrationController = async (req, res) => {
try {

View File

@@ -1,3 +1,5 @@
const { sendEvent } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { getResponseSender } = require('librechat-data-provider');
const {
handleAbortError,
@@ -10,9 +12,8 @@ const {
clientRegistry,
requestDataMap,
} = require('~/server/cleanup');
const { sendMessage, createOnProgress } = require('~/server/utils');
const { createOnProgress } = require('~/server/utils');
const { saveMessage } = require('~/models');
const { logger } = require('~/config');
const EditController = async (req, res, next, initializeClient) => {
let {
@@ -198,7 +199,7 @@ const EditController = async (req, res, next, initializeClient) => {
const finalUserMessage = reqDataContext.userMessage;
const finalResponseMessage = { ...response };
sendMessage(res, {
sendEvent(res, {
final: true,
conversation,
title: conversation.title,

View File

@@ -44,6 +44,17 @@ const BaseClient = require('~/app/clients/BaseClient');
const { loadAgent } = require('~/models/Agent');
const { getMCPManager } = require('~/config');
const omitTitleOptions = new Set([
'stream',
'thinking',
'streaming',
'clientOptions',
'thinkingConfig',
'thinkingBudget',
'includeThoughts',
'maxOutputTokens',
]);
/**
* @param {ServerRequest} req
* @param {Agent} agent
@@ -1038,6 +1049,16 @@ class AgentClient extends BaseClient {
delete clientOptions.maxTokens;
}
clientOptions = Object.assign(
Object.fromEntries(
Object.entries(clientOptions).filter(([key]) => !omitTitleOptions.has(key)),
),
);
if (provider === Providers.GOOGLE) {
clientOptions.json = true;
}
try {
const titleResult = await this.run.generateTitle({
provider,

View File

@@ -1,10 +1,10 @@
// errorHandler.js
const { logger } = require('~/config');
const getLogStores = require('~/cache/getLogStores');
const { logger } = require('@librechat/data-schemas');
const { CacheKeys, ViolationTypes } = require('librechat-data-provider');
const { sendResponse } = require('~/server/middleware/error');
const { recordUsage } = require('~/server/services/Threads');
const { getConvo } = require('~/models/Conversation');
const { sendResponse } = require('~/server/utils');
const getLogStores = require('~/cache/getLogStores');
/**
* @typedef {Object} ErrorHandlerContext
@@ -75,7 +75,7 @@ const createErrorHandler = ({ req, res, getContext, originPath = '/assistants/ch
} else if (/Files.*are invalid/.test(error.message)) {
const errorMessage = `Files are invalid, or may not have uploaded yet.${
endpoint === 'azureAssistants'
? ' If using Azure OpenAI, files are only available in the region of the assistant\'s model at the time of upload.'
? " If using Azure OpenAI, files are only available in the region of the assistant's model at the time of upload."
: ''
}`;
return sendResponse(req, res, messageData, errorMessage);

View File

@@ -1,3 +1,5 @@
const { sendEvent } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { Constants } = require('librechat-data-provider');
const {
handleAbortError,
@@ -5,9 +7,7 @@ const {
cleanupAbortController,
} = require('~/server/middleware');
const { disposeClient, clientRegistry, requestDataMap } = require('~/server/cleanup');
const { sendMessage } = require('~/server/utils');
const { saveMessage } = require('~/models');
const { logger } = require('~/config');
const AgentController = async (req, res, next, initializeClient, addTitle) => {
let {
@@ -206,7 +206,7 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
// Create a new response object with minimal copies
const finalResponse = { ...response };
sendMessage(res, {
sendEvent(res, {
final: true,
conversation,
title: conversation.title,

View File

@@ -1,4 +1,7 @@
const { v4 } = require('uuid');
const { sleep } = require('@librechat/agents');
const { sendEvent } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const {
Time,
Constants,
@@ -19,20 +22,20 @@ const {
addThreadMetadata,
saveAssistantMessage,
} = require('~/server/services/Threads');
const { sendResponse, sendMessage, sleep, countTokens } = require('~/server/utils');
const { runAssistant, createOnTextProgress } = require('~/server/services/AssistantService');
const validateAuthor = require('~/server/middleware/assistants/validateAuthor');
const { formatMessage, createVisionPrompt } = require('~/app/clients/prompts');
const { createRun, StreamRunManager } = require('~/server/services/Runs');
const { addTitle } = require('~/server/services/Endpoints/assistants');
const { createRunBody } = require('~/server/services/createRunBody');
const { sendResponse } = require('~/server/middleware/error');
const { getTransactions } = require('~/models/Transaction');
const { checkBalance } = require('~/models/balanceMethods');
const { getConvo } = require('~/models/Conversation');
const getLogStores = require('~/cache/getLogStores');
const { countTokens } = require('~/server/utils');
const { getModelMaxTokens } = require('~/utils');
const { getOpenAIClient } = require('./helpers');
const { logger } = require('~/config');
/**
* @route POST /
@@ -471,7 +474,7 @@ const chatV1 = async (req, res) => {
await Promise.all(promises);
const sendInitialResponse = () => {
sendMessage(res, {
sendEvent(res, {
sync: true,
conversationId,
// messages: previousMessages,
@@ -587,7 +590,7 @@ const chatV1 = async (req, res) => {
iconURL: endpointOption.iconURL,
};
sendMessage(res, {
sendEvent(res, {
final: true,
conversation,
requestMessage: {

View File

@@ -1,4 +1,7 @@
const { v4 } = require('uuid');
const { sleep } = require('@librechat/agents');
const { sendEvent } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const {
Time,
Constants,
@@ -22,15 +25,14 @@ const { createErrorHandler } = require('~/server/controllers/assistants/errors')
const validateAuthor = require('~/server/middleware/assistants/validateAuthor');
const { createRun, StreamRunManager } = require('~/server/services/Runs');
const { addTitle } = require('~/server/services/Endpoints/assistants');
const { sendMessage, sleep, countTokens } = require('~/server/utils');
const { createRunBody } = require('~/server/services/createRunBody');
const { getTransactions } = require('~/models/Transaction');
const { checkBalance } = require('~/models/balanceMethods');
const { getConvo } = require('~/models/Conversation');
const getLogStores = require('~/cache/getLogStores');
const { countTokens } = require('~/server/utils');
const { getModelMaxTokens } = require('~/utils');
const { getOpenAIClient } = require('./helpers');
const { logger } = require('~/config');
/**
* @route POST /
@@ -309,7 +311,7 @@ const chatV2 = async (req, res) => {
await Promise.all(promises);
const sendInitialResponse = () => {
sendMessage(res, {
sendEvent(res, {
sync: true,
conversationId,
// messages: previousMessages,
@@ -432,7 +434,7 @@ const chatV2 = async (req, res) => {
iconURL: endpointOption.iconURL,
};
sendMessage(res, {
sendEvent(res, {
final: true,
conversation,
requestMessage: {

View File

@@ -1,10 +1,10 @@
// errorHandler.js
const { sendResponse } = require('~/server/utils');
const { logger } = require('~/config');
const getLogStores = require('~/cache/getLogStores');
const { logger } = require('@librechat/data-schemas');
const { CacheKeys, ViolationTypes, ContentTypes } = require('librechat-data-provider');
const { getConvo } = require('~/models/Conversation');
const { recordUsage, checkMessageGaps } = require('~/server/services/Threads');
const { sendResponse } = require('~/server/middleware/error');
const { getConvo } = require('~/models/Conversation');
const getLogStores = require('~/cache/getLogStores');
/**
* @typedef {Object} ErrorHandlerContext
@@ -78,7 +78,7 @@ const createErrorHandler = ({ req, res, getContext, originPath = '/assistants/ch
} else if (/Files.*are invalid/.test(error.message)) {
const errorMessage = `Files are invalid, or may not have uploaded yet.${
endpoint === 'azureAssistants'
? ' If using Azure OpenAI, files are only available in the region of the assistant\'s model at the time of upload.'
? " If using Azure OpenAI, files are only available in the region of the assistant's model at the time of upload."
: ''
}`;
return sendResponse(req, res, messageData, errorMessage);

View File

@@ -1,13 +1,13 @@
// abortMiddleware.js
const { logger } = require('@librechat/data-schemas');
const { countTokens, isEnabled, sendEvent } = require('@librechat/api');
const { isAssistantsEndpoint, ErrorTypes } = require('librechat-data-provider');
const { sendMessage, sendError, countTokens, isEnabled } = require('~/server/utils');
const { truncateText, smartTruncateText } = require('~/app/clients/prompts');
const clearPendingReq = require('~/cache/clearPendingReq');
const { sendError } = require('~/server/middleware/error');
const { spendTokens } = require('~/models/spendTokens');
const abortControllers = require('./abortControllers');
const { saveMessage, getConvo } = require('~/models');
const { abortRun } = require('./abortRun');
const { logger } = require('~/config');
const abortDataMap = new WeakMap();
@@ -101,7 +101,7 @@ async function abortMessage(req, res) {
cleanupAbortController(abortKey);
if (res.headersSent && finalEvent) {
return sendMessage(res, finalEvent);
return sendEvent(res, finalEvent);
}
res.setHeader('Content-Type', 'application/json');
@@ -174,7 +174,7 @@ const createAbortController = (req, res, getAbortData, getReqData) => {
* @param {string} responseMessageId
*/
const onStart = (userMessage, responseMessageId) => {
sendMessage(res, { message: userMessage, created: true });
sendEvent(res, { message: userMessage, created: true });
const abortKey = userMessage?.conversationId ?? req.user.id;
getReqData({ abortKey });

View File

@@ -1,11 +1,11 @@
const { sendEvent } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { CacheKeys, RunStatus, isUUID } = require('librechat-data-provider');
const { initializeClient } = require('~/server/services/Endpoints/assistants');
const { checkMessageGaps, recordUsage } = require('~/server/services/Threads');
const { deleteMessages } = require('~/models/Message');
const { getConvo } = require('~/models/Conversation');
const getLogStores = require('~/cache/getLogStores');
const { sendMessage } = require('~/server/utils');
const { logger } = require('~/config');
const three_minutes = 1000 * 60 * 3;
@@ -34,7 +34,7 @@ async function abortRun(req, res) {
const [thread_id, run_id] = runValues.split(':');
if (!run_id) {
logger.warn('[abortRun] Couldn\'t find run for cancel request', { thread_id });
logger.warn("[abortRun] Couldn't find run for cancel request", { thread_id });
return res.status(204).send({ message: 'Run not found' });
} else if (run_id === 'cancelled') {
logger.warn('[abortRun] Run already cancelled', { thread_id });
@@ -93,7 +93,7 @@ async function abortRun(req, res) {
};
if (res.headersSent && finalEvent) {
return sendMessage(res, finalEvent);
return sendEvent(res, finalEvent);
}
res.json(finalEvent);

View File

@@ -1,6 +1,7 @@
const crypto = require('crypto');
const { sendEvent } = require('@librechat/api');
const { getResponseSender, Constants } = require('librechat-data-provider');
const { sendMessage, sendError } = require('~/server/utils');
const { sendError } = require('~/server/middleware/error');
const { saveMessage } = require('~/models');
/**
@@ -36,7 +37,7 @@ const denyRequest = async (req, res, errorMessage) => {
isCreatedByUser: true,
text,
};
sendMessage(res, { message: userMessage, created: true });
sendEvent(res, { message: userMessage, created: true });
const shouldSaveMessage = _convoId && parentMessageId && parentMessageId !== Constants.NO_PARENT;

View File

@@ -1,31 +1,9 @@
const crypto = require('crypto');
const { logger } = require('@librechat/data-schemas');
const { parseConvo } = require('librechat-data-provider');
const { sendEvent, handleError } = require('@librechat/api');
const { saveMessage, getMessages } = require('~/models/Message');
const { getConvo } = require('~/models/Conversation');
const { logger } = require('~/config');
/**
* Sends error data in Server Sent Events format and ends the response.
* @param {object} res - The server response.
* @param {string} message - The error message.
*/
const handleError = (res, message) => {
res.write(`event: error\ndata: ${JSON.stringify(message)}\n\n`);
res.end();
};
/**
* Sends message data in Server Sent Events format.
* @param {Express.Response} res - - The server response.
* @param {string | Object} message - The message to be sent.
* @param {'message' | 'error' | 'cancel'} event - [Optional] The type of event. Default is 'message'.
*/
const sendMessage = (res, message, event = 'message') => {
if (typeof message === 'string' && message.length === 0) {
return;
}
res.write(`event: ${event}\ndata: ${JSON.stringify(message)}\n\n`);
};
/**
* Processes an error with provided options, saves the error message and sends a corresponding SSE response
@@ -91,7 +69,7 @@ const sendError = async (req, res, options, callback) => {
convo = parseConvo(errorMessage);
}
return sendMessage(res, {
return sendEvent(res, {
final: true,
requestMessage: query?.[0] ? query[0] : requestMessage,
responseMessage: errorMessage,
@@ -120,12 +98,10 @@ const sendResponse = (req, res, data, errorMessage) => {
if (errorMessage) {
return sendError(req, res, { ...data, text: errorMessage });
}
return sendMessage(res, data);
return sendEvent(res, data);
};
module.exports = {
sendResponse,
handleError,
sendMessage,
sendError,
sendResponse,
};

View File

@@ -1,4 +1,5 @@
const express = require('express');
const { addTool } = require('@librechat/api');
const { callTool, verifyToolAuth, getToolCalls } = require('~/server/controllers/tools');
const { getAvailableTools } = require('~/server/controllers/PluginController');
const { toolCallLimiter } = require('~/server/middleware/limiters');
@@ -36,4 +37,12 @@ router.get('/:toolId/auth', verifyToolAuth);
*/
router.post('/:toolId/call', toolCallLimiter, callTool);
/**
* Add a new tool to the system
* @route POST /agents/tools/add
* @param {object} req.body - Request body containing tool data
* @returns {object} Created tool object
*/
router.post('/add', addTool);
module.exports = router;

View File

@@ -1,4 +1,7 @@
const { klona } = require('klona');
const { sleep } = require('@librechat/agents');
const { sendEvent } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const {
StepTypes,
RunStatus,
@@ -11,11 +14,10 @@ const {
} = require('librechat-data-provider');
const { retrieveAndProcessFile } = require('~/server/services/Files/process');
const { processRequiredActions } = require('~/server/services/ToolService');
const { createOnProgress, sendMessage, sleep } = require('~/server/utils');
const { RunManager, waitForRun } = require('~/server/services/Runs');
const { processMessages } = require('~/server/services/Threads');
const { createOnProgress } = require('~/server/utils');
const { TextStream } = require('~/app/clients');
const { logger } = require('~/config');
/**
* Sorts, processes, and flattens messages to a single string.
@@ -64,7 +66,7 @@ async function createOnTextProgress({
};
logger.debug('Content data:', contentData);
sendMessage(openai.res, contentData);
sendEvent(openai.res, contentData);
};
}

View File

@@ -1,5 +1,6 @@
const { isUserProvided } = require('@librechat/api');
const { EModelEndpoint } = require('librechat-data-provider');
const { isUserProvided, generateConfig } = require('~/server/utils');
const { generateConfig } = require('~/server/utils/handleText');
const {
OPENAI_API_KEY: openAIApiKey,

View File

@@ -1,18 +1,18 @@
const path = require('path');
const {
CacheKeys,
configSchema,
EImageOutputType,
validateSettingDefinitions,
agentParamSettings,
paramSettings,
} = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores');
const loadYaml = require('~/utils/loadYaml');
const { logger } = require('~/config');
const axios = require('axios');
const yaml = require('js-yaml');
const keyBy = require('lodash/keyBy');
const { loadYaml } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const {
CacheKeys,
configSchema,
paramSettings,
EImageOutputType,
agentParamSettings,
validateSettingDefinitions,
} = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores');
const projectRoot = path.resolve(__dirname, '..', '..', '..', '..');
const defaultConfigPath = path.resolve(projectRoot, 'librechat.yaml');

View File

@@ -1,6 +1,9 @@
jest.mock('axios');
jest.mock('~/cache/getLogStores');
jest.mock('~/utils/loadYaml');
jest.mock('@librechat/api', () => ({
...jest.requireActual('@librechat/api'),
loadYaml: jest.fn(),
}));
jest.mock('librechat-data-provider', () => {
const actual = jest.requireActual('librechat-data-provider');
return {
@@ -30,11 +33,22 @@ jest.mock('librechat-data-provider', () => {
};
});
jest.mock('@librechat/data-schemas', () => {
return {
logger: {
info: jest.fn(),
warn: jest.fn(),
debug: jest.fn(),
error: jest.fn(),
},
};
});
const axios = require('axios');
const { loadYaml } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const loadCustomConfig = require('./loadCustomConfig');
const getLogStores = require('~/cache/getLogStores');
const loadYaml = require('~/utils/loadYaml');
const { logger } = require('~/config');
describe('loadCustomConfig', () => {
const mockSet = jest.fn();

View File

@@ -0,0 +1,93 @@
const initializeClient = require('./initialize');
jest.mock('@librechat/api', () => ({
resolveHeaders: jest.fn(),
getOpenAIConfig: jest.fn(),
createHandleLLMNewToken: jest.fn(),
}));
jest.mock('librechat-data-provider', () => ({
CacheKeys: { TOKEN_CONFIG: 'token_config' },
ErrorTypes: { NO_USER_KEY: 'NO_USER_KEY', NO_BASE_URL: 'NO_BASE_URL' },
envVarRegex: /\$\{([^}]+)\}/,
FetchTokenConfig: {},
extractEnvVariable: jest.fn((value) => value),
}));
jest.mock('@librechat/agents', () => ({
Providers: { OLLAMA: 'ollama' },
}));
jest.mock('~/server/services/UserService', () => ({
getUserKeyValues: jest.fn(),
checkUserKeyExpiry: jest.fn(),
}));
jest.mock('~/server/services/Config', () => ({
getCustomEndpointConfig: jest.fn().mockResolvedValue({
apiKey: 'test-key',
baseURL: 'https://test.com',
headers: { 'x-user': '{{LIBRECHAT_USER_ID}}', 'x-email': '{{LIBRECHAT_USER_EMAIL}}' },
models: { default: ['test-model'] },
}),
}));
jest.mock('~/server/services/ModelService', () => ({
fetchModels: jest.fn(),
}));
jest.mock('~/app/clients/OpenAIClient', () => {
return jest.fn().mockImplementation(() => ({
options: {},
}));
});
jest.mock('~/server/utils', () => ({
isUserProvided: jest.fn().mockReturnValue(false),
}));
jest.mock('~/cache/getLogStores', () =>
jest.fn().mockReturnValue({
get: jest.fn(),
}),
);
describe('custom/initializeClient', () => {
const mockRequest = {
body: { endpoint: 'test-endpoint' },
user: { id: 'user-123', email: 'test@example.com' },
app: { locals: {} },
};
const mockResponse = {};
beforeEach(() => {
jest.clearAllMocks();
});
it('calls resolveHeaders with headers and user', async () => {
const { resolveHeaders } = require('@librechat/api');
await initializeClient({ req: mockRequest, res: mockResponse, optionsOnly: true });
expect(resolveHeaders).toHaveBeenCalledWith(
{ 'x-user': '{{LIBRECHAT_USER_ID}}', 'x-email': '{{LIBRECHAT_USER_EMAIL}}' },
{ id: 'user-123', email: 'test@example.com' },
);
});
it('throws if endpoint config is missing', async () => {
const { getCustomEndpointConfig } = require('~/server/services/Config');
getCustomEndpointConfig.mockResolvedValueOnce(null);
await expect(
initializeClient({ req: mockRequest, res: mockResponse, optionsOnly: true }),
).rejects.toThrow('Config not found for the test-endpoint custom endpoint.');
});
it('throws if user is missing', async () => {
await expect(
initializeClient({
req: { ...mockRequest, user: undefined },
res: mockResponse,
optionsOnly: true,
}),
).rejects.toThrow("Cannot read properties of undefined (reading 'id')");
});
});

View File

@@ -1,7 +1,6 @@
const { getGoogleConfig, isEnabled } = require('@librechat/api');
const { EModelEndpoint, AuthKeys } = require('librechat-data-provider');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
const { getLLMConfig } = require('~/server/services/Endpoints/google/llm');
const { isEnabled } = require('~/server/utils');
const { GoogleClient } = require('~/app');
const initializeClient = async ({ req, res, endpointOption, overrideModel, optionsOnly }) => {
@@ -65,7 +64,7 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio
if (overrideModel) {
clientOptions.modelOptions.model = overrideModel;
}
return getLLMConfig(credentials, clientOptions);
return getGoogleConfig(credentials, clientOptions);
}
const client = new GoogleClient(credentials, clientOptions);

View File

@@ -1,3 +1,6 @@
const { sleep } = require('@librechat/agents');
const { sendEvent } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const {
Constants,
StepTypes,
@@ -8,9 +11,8 @@ const {
} = require('librechat-data-provider');
const { retrieveAndProcessFile } = require('~/server/services/Files/process');
const { processRequiredActions } = require('~/server/services/ToolService');
const { createOnProgress, sendMessage, sleep } = require('~/server/utils');
const { processMessages } = require('~/server/services/Threads');
const { logger } = require('~/config');
const { createOnProgress } = require('~/server/utils');
/**
* Implements the StreamRunManager functionality for managing the streaming
@@ -126,7 +128,7 @@ class StreamRunManager {
conversationId: this.finalMessage.conversationId,
};
sendMessage(this.res, contentData);
sendEvent(this.res, contentData);
}
/* <------------------ Misc. Helpers ------------------> */
@@ -302,7 +304,7 @@ class StreamRunManager {
for (const d of delta[key]) {
if (typeof d === 'object' && !Object.prototype.hasOwnProperty.call(d, 'index')) {
logger.warn('Expected an object with an \'index\' for array updates but got:', d);
logger.warn("Expected an object with an 'index' for array updates but got:", d);
continue;
}

View File

@@ -7,9 +7,9 @@ const {
defaultAssistantsVersion,
defaultAgentCapabilities,
} = require('librechat-data-provider');
const { sendEvent } = require('@librechat/api');
const { Providers } = require('@librechat/agents');
const partialRight = require('lodash/partialRight');
const { sendMessage } = require('./streamResponse');
/** Helper function to escape special characters in regex
* @param {string} string - The string to escape.
@@ -37,7 +37,7 @@ const createOnProgress = (
basePayload.text = basePayload.text + chunk;
const payload = Object.assign({}, basePayload, rest);
sendMessage(res, payload);
sendEvent(res, payload);
if (_onProgress) {
_onProgress(payload);
}
@@ -50,7 +50,7 @@ const createOnProgress = (
const sendIntermediateMessage = (res, payload, extraTokens = '') => {
basePayload.text = basePayload.text + extraTokens;
const message = Object.assign({}, basePayload, payload);
sendMessage(res, message);
sendEvent(res, message);
if (i === 0) {
basePayload.initial = false;
}

View File

@@ -1,11 +1,9 @@
const streamResponse = require('./streamResponse');
const removePorts = require('./removePorts');
const countTokens = require('./countTokens');
const handleText = require('./handleText');
const sendEmail = require('./sendEmail');
const queue = require('./queue');
const files = require('./files');
const math = require('./math');
/**
* Check if email configuration is set
@@ -28,7 +26,6 @@ function checkEmailConfig() {
}
module.exports = {
...streamResponse,
checkEmailConfig,
...handleText,
countTokens,
@@ -36,5 +33,4 @@ module.exports = {
sendEmail,
...files,
...queue,
math,
};

View File

@@ -1,11 +1,9 @@
const loadYaml = require('./loadYaml');
const tokenHelpers = require('./tokens');
const deriveBaseURL = require('./deriveBaseURL');
const extractBaseURL = require('./extractBaseURL');
const findMessageContent = require('./findMessageContent');
module.exports = {
loadYaml,
deriveBaseURL,
extractBaseURL,
...tokenHelpers,

View File

@@ -1,13 +0,0 @@
const fs = require('fs');
const yaml = require('js-yaml');

/**
 * Loads and parses a YAML file from disk.
 *
 * NOTE: on failure this returns the caught Error object instead of
 * throwing — callers must check the result type. This legacy contract
 * is preserved for backward compatibility.
 *
 * @param {string} filepath - Path to the YAML file to read.
 * @returns {unknown|Error} The parsed YAML content, or the Error on failure.
 */
function loadYaml(filepath) {
  try {
    // File contents are never reassigned; use `const` instead of `let`.
    const fileContents = fs.readFileSync(filepath, 'utf8');
    return yaml.load(fileContents);
  } catch (e) {
    // Deliberate best-effort: surface the error as a return value.
    return e;
  }
}

module.exports = loadYaml;

View File

@@ -6,6 +6,7 @@ import { useUpdateUserPluginsMutation } from 'librechat-data-provider/react-quer
import type { TUpdateUserPlugins } from 'librechat-data-provider';
import { Button, Input, Label } from '~/components/ui';
import { useGetStartupConfig } from '~/data-provider';
import { useAddToolMutation } from '~/data-provider/Tools';
import MCPPanelSkeleton from './MCPPanelSkeleton';
import { useToastContext } from '~/Providers';
import { useLocalize } from '~/hooks';
@@ -17,6 +18,12 @@ interface ServerConfigWithVars {
};
}
interface AddToolFormData {
name: string;
description: string;
type: 'function' | 'code_interpreter' | 'file_search';
}
export default function MCPPanel() {
const localize = useLocalize();
const { showToast } = useToastContext();
@@ -24,6 +31,7 @@ export default function MCPPanel() {
const [selectedServerNameForEditing, setSelectedServerNameForEditing] = useState<string | null>(
null,
);
const [showAddToolForm, setShowAddToolForm] = useState(true);
const mcpServerDefinitions = useMemo(() => {
if (!startupConfig?.mcpServers) {
@@ -87,19 +95,25 @@ export default function MCPPanel() {
const handleGoBackToList = () => {
setSelectedServerNameForEditing(null);
setShowAddToolForm(false);
};
const handleShowAddToolForm = () => {
setShowAddToolForm(true);
setSelectedServerNameForEditing(null);
};
if (startupConfigLoading) {
return <MCPPanelSkeleton />;
}
if (mcpServerDefinitions.length === 0) {
return (
<div className="p-4 text-center text-sm text-gray-500">
{localize('com_sidepanel_mcp_no_servers_with_vars')}
</div>
);
}
// if (mcpServerDefinitions.length === 0) {
// return (
// <div className="p-4 text-center text-sm text-gray-500">
// {localize('com_sidepanel_mcp_no_servers_with_vars')}
// </div>
// );
// }
if (selectedServerNameForEditing) {
// Editing View
@@ -138,10 +152,32 @@ export default function MCPPanel() {
/>
</div>
);
} else if (showAddToolForm) {
// Add Tool Form View
return (
<div className="h-auto max-w-full overflow-x-hidden p-3">
<Button
variant="outline"
onClick={handleGoBackToList}
className="mb-3 flex items-center px-3 py-2 text-sm"
>
<ChevronLeft className="mr-1 h-4 w-4" />
{localize('com_ui_back')}
</Button>
<h3 className="mb-3 text-lg font-medium">{localize('com_ui_add_tool')}</h3>
<AddToolForm onCancel={handleGoBackToList} />
</div>
);
} else {
// Server List View
return (
<div className="h-auto max-w-full overflow-x-hidden p-3">
<div className="mb-3 flex items-center justify-between">
<h3 className="text-lg font-medium">{localize('com_ui_mcp_servers')}</h3>
<Button variant="outline" onClick={handleShowAddToolForm} className="text-sm">
{localize('com_ui_add_tool')}
</Button>
</div>
<div className="space-y-2">
{mcpServerDefinitions.map((server) => (
<Button
@@ -251,3 +287,131 @@ function MCPVariableEditor({ server, onSave, onRevoke, isSubmitting }: MCPVariab
</form>
);
}
/** Props for the AddToolForm component. */
interface AddToolFormProps {
  /** Invoked to dismiss the form; also called after a successful add. */
  onCancel: () => void;
}

/**
 * Form for registering a new tool (name, description, and type).
 *
 * Submits via useAddToolMutation; shows a success toast and closes the
 * form on success, or logs and shows an error toast on failure. The save
 * button is disabled until the form is dirty and while the mutation is
 * in flight.
 *
 * NOTE(review): assumes `useForm` and `Controller` come from
 * react-hook-form imported at the top of this file — confirm.
 */
function AddToolForm({ onCancel }: AddToolFormProps) {
  const localize = useLocalize();
  const { showToast } = useToastContext();

  // Mutation wrapping the create-tool request; toasts on both outcomes.
  const addToolMutation = useAddToolMutation({
    onSuccess: (data) => {
      showToast({
        message: localize('com_ui_tool_added_success', { '0': data.function?.name || 'Unknown' }),
        status: 'success',
      });
      // Close the form after a successful add.
      onCancel();
    },
    onError: (error) => {
      console.error('Error adding tool:', error);
      showToast({
        message: localize('com_ui_tool_add_error'),
        status: 'error',
      });
    },
  });

  const {
    control,
    handleSubmit,
    formState: { errors, isDirty },
  } = useForm<AddToolFormData>({
    defaultValues: {
      name: '',
      description: '',
      type: 'function',
    },
  });

  // Forward validated form values to the create-tool mutation.
  const onFormSubmit = (data: AddToolFormData) => {
    addToolMutation.mutate(data);
  };

  return (
    <form onSubmit={handleSubmit(onFormSubmit)} className="mb-4 mt-2 space-y-4">
      {/* Tool name (required) */}
      <div className="space-y-2">
        <Label htmlFor="tool-name" className="text-sm font-medium">
          {localize('com_ui_tool_name')}
        </Label>
        <Controller
          name="name"
          control={control}
          rules={{ required: localize('com_ui_tool_name_required') }}
          render={({ field }) => (
            <Input
              id="tool-name"
              type="text"
              {...field}
              placeholder={localize('com_ui_enter_tool_name')}
              className="w-full rounded-md border-gray-300 shadow-sm focus:border-indigo-500 focus:ring-indigo-500 dark:border-gray-600 dark:bg-gray-700 dark:text-white sm:text-sm"
            />
          )}
        />
        {errors.name && <p className="text-xs text-red-500">{errors.name.message}</p>}
      </div>
      {/* Tool description (required) */}
      <div className="space-y-2">
        <Label htmlFor="tool-description" className="text-sm font-medium">
          {localize('com_ui_description')}
        </Label>
        <Controller
          name="description"
          control={control}
          rules={{ required: localize('com_ui_description_required') }}
          render={({ field }) => (
            <Input
              id="tool-description"
              type="text"
              {...field}
              placeholder={localize('com_ui_enter_description')}
              className="w-full rounded-md border-gray-300 shadow-sm focus:border-indigo-500 focus:ring-indigo-500 dark:border-gray-600 dark:bg-gray-700 dark:text-white sm:text-sm"
            />
          )}
        />
        {errors.description && <p className="text-xs text-red-500">{errors.description.message}</p>}
      </div>
      {/* Tool type selector (defaults to 'function'); no validation rule, so
          errors.type is not expected to fire — kept for symmetry */}
      <div className="space-y-2">
        <Label htmlFor="tool-type" className="text-sm font-medium">
          {localize('com_ui_tool_type')}
        </Label>
        <Controller
          name="type"
          control={control}
          render={({ field }) => (
            <select
              id="tool-type"
              {...field}
              className="w-full rounded-md border-gray-300 shadow-sm focus:border-indigo-500 focus:ring-indigo-500 dark:border-gray-600 dark:bg-gray-700 dark:text-white sm:text-sm"
            >
              <option value="function">{localize('com_ui_function')}</option>
              <option value="code_interpreter">{localize('com_ui_code_interpreter')}</option>
              <option value="file_search">{localize('com_ui_file_search')}</option>
            </select>
          )}
        />
        {errors.type && <p className="text-xs text-red-500">{errors.type.message}</p>}
      </div>
      {/* Actions: cancel disabled while saving; save requires a dirty form */}
      <div className="flex justify-end gap-2 pt-2">
        <Button
          type="button"
          variant="outline"
          onClick={onCancel}
          disabled={addToolMutation.isLoading}
        >
          {localize('com_ui_cancel')}
        </Button>
        <Button
          type="submit"
          className="bg-green-500 text-white hover:bg-green-600"
          disabled={addToolMutation.isLoading || !isDirty}
        >
          {addToolMutation.isLoading ? localize('com_ui_saving') : localize('com_ui_save')}
        </Button>
      </div>
    </form>
  );
}

View File

@@ -46,6 +46,10 @@ function DynamicInput({
setInputValue(e, !isNaN(Number(e.target.value)));
};
const placeholderText = placeholderCode
? localize(placeholder as TranslationKeys) || placeholder
: placeholder;
return (
<div
className={`flex flex-col items-center justify-start gap-6 ${
@@ -76,11 +80,7 @@ function DynamicInput({
disabled={readonly}
value={inputValue ?? defaultValue ?? ''}
onChange={handleInputChange}
placeholder={
placeholderCode
? localize(placeholder as TranslationKeys) || placeholder
: placeholder
}
placeholder={placeholderText}
className={cn(
'flex h-10 max-h-10 w-full resize-none border-none bg-surface-secondary px-3 py-2',
)}

View File

@@ -29,7 +29,7 @@ function OptionHover({
<HoverCardPortal>
<HoverCardContent side={side} className={`z-[999] w-80 ${className}`} sideOffset={sideOffset}>
<div className="space-y-2">
<p className="text-sm text-gray-600 dark:text-gray-300">{text}</p>
<p className="whitespace-pre-wrap text-sm text-text-secondary">{text}</p>
</div>
</HoverCardContent>
</HoverCardPortal>

View File

@@ -40,3 +40,44 @@ export const useToolCallMutation = <T extends t.ToolId>(
},
);
};
/**
 * Interface for creating a new tool
 */
interface CreateToolData {
  /** Tool name (required by the backend handler) */
  name: string;
  /** Human-readable description of what the tool does */
  description: string;
  /** Tool category; mirrors the backend's accepted types */
  type: 'function' | 'code_interpreter' | 'file_search';
  /** Optional arbitrary metadata forwarded as-is */
  metadata?: Record<string, unknown>;
}
/**
 * Mutation hook for adding a new tool to the system.
 *
 * On success the tools query cache is invalidated so any mounted tool
 * lists refetch automatically, then the caller's onSuccess runs.
 *
 * Note: Requires corresponding backend implementation of dataService.createTool
 *
 * @param options - Optional react-query mutation callbacks (onMutate/onError/onSuccess).
 * @returns Mutation result whose data is the raw created-tool payload.
 */
export const useAddToolMutation = (
  options?: t.MutationOptions<Record<string, unknown>, CreateToolData>,
): UseMutationResult<Record<string, unknown>, Error, CreateToolData> => {
  const queryClient = useQueryClient();
  return useMutation(
    (toolData: CreateToolData) => {
      return dataService.createTool(toolData);
    },
    {
      onMutate: (variables) => options?.onMutate?.(variables),
      onError: (error, variables, context) => options?.onError?.(error, variables, context),
      onSuccess: (data, variables, context) => {
        // Invalidate tools list to trigger refetch
        queryClient.invalidateQueries([QueryKeys.tools]);
        return options?.onSuccess?.(data, variables, context);
      },
    },
  );
};

View File

@@ -194,7 +194,7 @@ export const useConversationTagsQuery = (
/**
* Hook for getting all available tools for Assistants
*/
export const useAvailableToolsQuery = <TData = t.TPlugin[]>(
export const useAvailableToolsQuery = <TData = t.TPlugin[]>( // <-- this one
endpoint: t.AssistantsEndpoint | EModelEndpoint.agents,
config?: UseQueryOptions<t.TPlugin[], unknown, TData>,
): QueryObserverResult<TData> => {

View File

@@ -152,20 +152,20 @@ export default function useSideNavLinks({
});
}
if (
startupConfig?.mcpServers &&
Object.values(startupConfig.mcpServers).some(
(server) => server.customUserVars && Object.keys(server.customUserVars).length > 0,
)
) {
links.push({
title: 'com_nav_setting_mcp',
label: '',
icon: MCPIcon,
id: 'mcp-settings',
Component: MCPPanel,
});
}
// if (
// startupConfig?.mcpServers &&
// Object.values(startupConfig.mcpServers).some(
// (server) => server.customUserVars && Object.keys(server.customUserVars).length > 0,
// )
// ) {
links.push({
title: 'com_nav_setting_mcp',
label: '',
icon: MCPIcon,
id: 'mcp-settings',
Component: MCPPanel,
});
// }
links.push({
title: 'com_sidepanel_hide_panel',

View File

@@ -207,6 +207,8 @@
"com_endpoint_google_temp": "Higher values = more random, while lower values = more focused and deterministic. We recommend altering this or Top P but not both.",
"com_endpoint_google_topk": "Top-k changes how the model selects tokens for output. A top-k of 1 means the selected token is the most probable among all tokens in the model's vocabulary (also called greedy decoding), while a top-k of 3 means that the next token is selected from among the 3 most probable tokens (using temperature).",
"com_endpoint_google_topp": "Top-p changes how the model selects tokens for output. Tokens are selected from most K (see topK parameter) probable to least until the sum of their probabilities equals the top-p value.",
"com_endpoint_google_thinking": "Enables or disables reasoning. This setting is only supported by certain models (2.5 series). For older models, this setting may have no effect.",
"com_endpoint_google_thinking_budget": "Guides the number of thinking tokens the model uses. The actual amount may exceed or fall below this value depending on the prompt.\n\nThis setting is only supported by certain models (2.5 series). Gemini 2.5 Pro supports 128-32,768 tokens. Gemini 2.5 Flash supports 0-24,576 tokens. Gemini 2.5 Flash Lite supports 512-24,576 tokens.\n\nLeave blank or set to \"-1\" to let the model automatically decide when and how much to think. By default, Gemini 2.5 Flash Lite does not think.",
"com_endpoint_instructions_assistants": "Override Instructions",
"com_endpoint_instructions_assistants_placeholder": "Overrides the instructions of the assistant. This is useful for modifying the behavior on a per-run basis.",
"com_endpoint_max_output_tokens": "Max Output Tokens",
@@ -582,6 +584,7 @@
"com_ui_auth_url": "Authorization URL",
"com_ui_authentication": "Authentication",
"com_ui_authentication_type": "Authentication Type",
"com_ui_auto": "Auto",
"com_ui_available_tools": "Available Tools",
"com_ui_avatar": "Avatar",
"com_ui_azure": "Azure",

View File

@@ -73,6 +73,8 @@ interface:
bookmarks: true
multiConvo: true
agents: true
# Temporary chat retention period in hours (default: 720, min: 1, max: 8760)
# temporaryChatRetention: 1
# Example Cloudflare turnstile (optional)
#turnstile:

65
package-lock.json generated
View File

@@ -64,7 +64,7 @@
"@langchain/google-genai": "^0.2.13",
"@langchain/google-vertexai": "^0.2.13",
"@langchain/textsplitters": "^0.1.0",
"@librechat/agents": "^2.4.42",
"@librechat/agents": "^2.4.46",
"@librechat/api": "*",
"@librechat/data-schemas": "*",
"@node-saml/passport-saml": "^5.0.0",
@@ -1351,33 +1351,6 @@
}
}
},
"api/node_modules/@librechat/agents": {
"version": "2.4.42",
"resolved": "https://registry.npmjs.org/@librechat/agents/-/agents-2.4.42.tgz",
"integrity": "sha512-52ux2PeEAV79yr6/h6GN3omlpqX6H0FYl6qwjJ6gT04MMko/imnLd3bQrX0gm3i0KL5ygHbRjQeonONKjJayHw==",
"license": "MIT",
"dependencies": {
"@langchain/anthropic": "^0.3.23",
"@langchain/aws": "^0.1.11",
"@langchain/community": "^0.3.47",
"@langchain/core": "^0.3.60",
"@langchain/deepseek": "^0.0.2",
"@langchain/google-genai": "^0.2.13",
"@langchain/google-vertexai": "^0.2.13",
"@langchain/langgraph": "^0.3.4",
"@langchain/mistralai": "^0.2.1",
"@langchain/ollama": "^0.2.3",
"@langchain/openai": "^0.5.14",
"@langchain/xai": "^0.0.3",
"cheerio": "^1.0.0",
"dotenv": "^16.4.7",
"https-proxy-agent": "^7.0.6",
"nanoid": "^3.3.7"
},
"engines": {
"node": ">=14.0.0"
}
},
"api/node_modules/@smithy/abort-controller": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.0.2.tgz",
@@ -19153,9 +19126,9 @@
}
},
"node_modules/@langchain/langgraph": {
"version": "0.3.4",
"resolved": "https://registry.npmjs.org/@langchain/langgraph/-/langgraph-0.3.4.tgz",
"integrity": "sha512-Vuja8Qtu3Zjx7k4fK7Cnw+p8gtvIRPciWp9btPhAs3aUo6aBgOJOZVcK5Ii3mHfEHK/aQmRElR0x/u/YwykOrg==",
"version": "0.3.5",
"resolved": "https://registry.npmjs.org/@langchain/langgraph/-/langgraph-0.3.5.tgz",
"integrity": "sha512-7astlgnp6BdMQJqmr+cbDgR10FYWNCaDLnbfEDHpqhKCCajU59m5snOdl4Vtu5UM6V2k3lgatNqWoflBtxhIyg==",
"license": "MIT",
"dependencies": {
"@langchain/langgraph-checkpoint": "~0.0.18",
@@ -19463,11 +19436,10 @@
}
},
"node_modules/@librechat/agents": {
"version": "2.4.41",
"resolved": "https://registry.npmjs.org/@librechat/agents/-/agents-2.4.41.tgz",
"integrity": "sha512-kYmdk5WVRp0qZxTx6BuGCs4l0Ir9iBLLx4ZY4/1wxr80al5/vq3P8wbgGdKMeO2qTu4ZaT4RyWRQYWBg5HDkUQ==",
"version": "2.4.46",
"resolved": "https://registry.npmjs.org/@librechat/agents/-/agents-2.4.46.tgz",
"integrity": "sha512-zR27U19/WGF3HN64oBbiaFgjjWHaF7BjYzRFWzQKEkk+iEzCe59IpuEZUizQ54YcY02nhhh6S3MNUjhAJwMYVA==",
"license": "MIT",
"peer": true,
"dependencies": {
"@langchain/anthropic": "^0.3.23",
"@langchain/aws": "^0.1.11",
@@ -19495,7 +19467,6 @@
"resolved": "https://registry.npmjs.org/@langchain/community/-/community-0.3.47.tgz",
"integrity": "sha512-Vo42kAfkXpTFSevhEkeqqE55az8NyQgDktCbitXYuhipNbFYx08XVvqEDkFkB20MM/Z7u+cvLb+DxCqnKuH0CQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"@langchain/openai": ">=0.2.0 <0.6.0",
"@langchain/weaviate": "^0.2.0",
@@ -20017,11 +19988,10 @@
}
},
"node_modules/@librechat/agents/node_modules/@langchain/openai": {
"version": "0.5.14",
"resolved": "https://registry.npmjs.org/@langchain/openai/-/openai-0.5.14.tgz",
"integrity": "sha512-0GEj5K/qi1MRuZ4nE7NvyI4jTG+RSewLZqsExUwRukWdeqmkPNHGrogTa5ZDt7eaJxAaY7EgLC5ZnvCM3L1oug==",
"version": "0.5.15",
"resolved": "https://registry.npmjs.org/@langchain/openai/-/openai-0.5.15.tgz",
"integrity": "sha512-ANadEHyAj5sufQpz+SOPpKbyoMcTLhnh8/d+afbSPUqWsIMPpEFX3HoSY3nrBPG6l4NQQNG5P5oHb4SdC8+YIg==",
"license": "MIT",
"peer": true,
"dependencies": {
"js-tiktoken": "^1.0.12",
"openai": "^5.3.0",
@@ -20039,7 +20009,6 @@
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz",
"integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==",
"license": "MIT",
"peer": true,
"engines": {
"node": ">= 14"
}
@@ -20049,7 +20018,6 @@
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz",
"integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==",
"license": "MIT",
"peer": true,
"dependencies": {
"agent-base": "^7.1.2",
"debug": "4"
@@ -20059,11 +20027,10 @@
}
},
"node_modules/@librechat/agents/node_modules/openai": {
"version": "5.5.1",
"resolved": "https://registry.npmjs.org/openai/-/openai-5.5.1.tgz",
"integrity": "sha512-5i19097mGotHA1eFsM6Tjd/tJ8uo9sa5Ysv4Q6bKJ2vtN6rc0MzMrUefXnLXYAJcmMQrC1Efhj0AvfIkXrQamw==",
"version": "5.7.0",
"resolved": "https://registry.npmjs.org/openai/-/openai-5.7.0.tgz",
"integrity": "sha512-zXWawZl6J/P5Wz57/nKzVT3kJQZvogfuyuNVCdEp4/XU2UNrjL7SsuNpWAyLZbo6HVymwmnfno9toVzBhelygA==",
"license": "Apache-2.0",
"peer": true,
"bin": {
"openai": "bin/cli"
},
@@ -20089,7 +20056,6 @@
"https://github.com/sponsors/ctavan"
],
"license": "MIT",
"peer": true,
"bin": {
"uuid": "dist/bin/uuid"
}
@@ -46568,7 +46534,7 @@
},
"packages/api": {
"name": "@librechat/api",
"version": "1.2.4",
"version": "1.2.5",
"license": "ISC",
"devDependencies": {
"@babel/preset-env": "^7.21.5",
@@ -46600,13 +46566,14 @@
"typescript": "^5.0.4"
},
"peerDependencies": {
"@librechat/agents": "^2.4.41",
"@librechat/agents": "^2.4.46",
"@librechat/data-schemas": "*",
"@modelcontextprotocol/sdk": "^1.12.3",
"axios": "^1.8.2",
"diff": "^7.0.0",
"eventsource": "^3.0.2",
"express": "^4.21.2",
"js-yaml": "^4.1.0",
"keyv": "^5.3.2",
"librechat-data-provider": "*",
"node-fetch": "2.7.0",

View File

@@ -1,6 +1,6 @@
{
"name": "@librechat/api",
"version": "1.2.4",
"version": "1.2.5",
"type": "commonjs",
"description": "MCP services for LibreChat",
"main": "dist/index.js",
@@ -69,13 +69,14 @@
"registry": "https://registry.npmjs.org/"
},
"peerDependencies": {
"@librechat/agents": "^2.4.41",
"@librechat/agents": "^2.4.46",
"@librechat/data-schemas": "*",
"@modelcontextprotocol/sdk": "^1.12.3",
"axios": "^1.8.2",
"diff": "^7.0.0",
"eventsource": "^3.0.2",
"express": "^4.21.2",
"js-yaml": "^4.1.0",
"keyv": "^5.3.2",
"librechat-data-provider": "*",
"node-fetch": "2.7.0",

View File

@@ -46,7 +46,10 @@ export async function createRun({
customHandlers?: Record<GraphEvents, EventHandler>;
}): Promise<Run<IState>> {
const provider =
providerEndpointMap[agent.provider as keyof typeof providerEndpointMap] ?? agent.provider;
(providerEndpointMap[
agent.provider as keyof typeof providerEndpointMap
] as unknown as Providers) ?? agent.provider;
const llmConfig: t.RunLLMConfig = Object.assign(
{
provider,
@@ -66,7 +69,9 @@ export async function createRun({
}
let reasoningKey: 'reasoning_content' | 'reasoning' | undefined;
if (
if (provider === Providers.GOOGLE) {
reasoningKey = 'reasoning';
} else if (
llmConfig.configuration?.baseURL?.includes(KnownEndpoints.openrouter) ||
(agent.endpoint && agent.endpoint.toLowerCase().includes(KnownEndpoints.openrouter))
) {

View File

@@ -0,0 +1 @@
export * from './llm';

View File

@@ -1,13 +1,15 @@
const { Providers } = require('@librechat/agents');
const { AuthKeys } = require('librechat-data-provider');
const { isEnabled } = require('~/server/utils');
import { Providers } from '@librechat/agents';
import { googleSettings, AuthKeys } from 'librechat-data-provider';
import type { GoogleClientOptions, VertexAIClientOptions } from '@librechat/agents';
import type * as t from '~/types';
import { isEnabled } from '~/utils';
function getThresholdMapping(model) {
function getThresholdMapping(model: string) {
const gemini1Pattern = /gemini-(1\.0|1\.5|pro$|1\.0-pro|1\.5-pro|1\.5-flash-001)/;
const restrictedPattern = /(gemini-(1\.5-flash-8b|2\.0|exp)|learnlm)/;
if (gemini1Pattern.test(model)) {
return (value) => {
return (value: string) => {
if (value === 'OFF') {
return 'BLOCK_NONE';
}
@@ -16,7 +18,7 @@ function getThresholdMapping(model) {
}
if (restrictedPattern.test(model)) {
return (value) => {
return (value: string) => {
if (value === 'OFF' || value === 'HARM_BLOCK_THRESHOLD_UNSPECIFIED') {
return 'BLOCK_NONE';
}
@@ -24,19 +26,16 @@ function getThresholdMapping(model) {
};
}
return (value) => value;
return (value: string) => value;
}
/**
*
* @param {string} model
* @returns {Array<{category: string, threshold: string}> | undefined}
*/
function getSafetySettings(model) {
export function getSafetySettings(
model?: string,
): Array<{ category: string; threshold: string }> | undefined {
if (isEnabled(process.env.GOOGLE_EXCLUDE_SAFETY_SETTINGS)) {
return undefined;
}
const mapThreshold = getThresholdMapping(model);
const mapThreshold = getThresholdMapping(model ?? '');
return [
{
@@ -74,24 +73,27 @@ function getSafetySettings(model) {
* Replicates core logic from GoogleClient's constructor and setOptions, plus client determination.
* Returns an object with the provider label and the final options that would be passed to createLLM.
*
* @param {string | object} credentials - Either a JSON string or an object containing Google keys
* @param {object} [options={}] - The same shape as the "GoogleClient" constructor options
* @param credentials - Either a JSON string or an object containing Google keys
* @param options - The same shape as the "GoogleClient" constructor options
*/
function getLLMConfig(credentials, options = {}) {
// 1. Parse credentials
let creds = {};
export function getGoogleConfig(
credentials: string | t.GoogleCredentials | undefined,
options: t.GoogleConfigOptions = {},
) {
let creds: t.GoogleCredentials = {};
if (typeof credentials === 'string') {
try {
creds = JSON.parse(credentials);
} catch (err) {
throw new Error(`Error parsing string credentials: ${err.message}`);
} catch (err: unknown) {
throw new Error(
`Error parsing string credentials: ${err instanceof Error ? err.message : 'Unknown error'}`,
);
}
} else if (credentials && typeof credentials === 'object') {
creds = credentials;
}
// Extract from credentials
const serviceKeyRaw = creds[AuthKeys.GOOGLE_SERVICE_KEY] ?? {};
const serviceKey =
typeof serviceKeyRaw === 'string' ? JSON.parse(serviceKeyRaw) : (serviceKeyRaw ?? {});
@@ -102,9 +104,15 @@ function getLLMConfig(credentials, options = {}) {
const reverseProxyUrl = options.reverseProxyUrl;
const authHeader = options.authHeader;
/** @type {GoogleClientOptions | VertexAIClientOptions} */
let llmConfig = {
...(options.modelOptions || {}),
const {
thinking = googleSettings.thinking.default,
thinkingBudget = googleSettings.thinkingBudget.default,
...modelOptions
} = options.modelOptions || {};
const llmConfig: GoogleClientOptions | VertexAIClientOptions = {
...(modelOptions || {}),
model: modelOptions?.model ?? '',
maxRetries: 2,
};
@@ -121,16 +129,30 @@ function getLLMConfig(credentials, options = {}) {
// If we have a GCP project => Vertex AI
if (project_id && provider === Providers.VERTEXAI) {
/** @type {VertexAIClientOptions['authOptions']} */
llmConfig.authOptions = {
(llmConfig as VertexAIClientOptions).authOptions = {
credentials: { ...serviceKey },
projectId: project_id,
};
llmConfig.location = process.env.GOOGLE_LOC || 'us-central1';
(llmConfig as VertexAIClientOptions).location = process.env.GOOGLE_LOC || 'us-central1';
} else if (apiKey && provider === Providers.GOOGLE) {
llmConfig.apiKey = apiKey;
}
const shouldEnableThinking =
thinking && thinkingBudget != null && (thinkingBudget > 0 || thinkingBudget === -1);
if (shouldEnableThinking && provider === Providers.GOOGLE) {
(llmConfig as GoogleClientOptions).thinkingConfig = {
thinkingBudget: thinking ? thinkingBudget : googleSettings.thinkingBudget.default,
includeThoughts: Boolean(thinking),
};
} else if (shouldEnableThinking && provider === Providers.VERTEXAI) {
(llmConfig as VertexAIClientOptions).thinkingBudget = thinking
? thinkingBudget
: googleSettings.thinkingBudget.default;
(llmConfig as VertexAIClientOptions).includeThoughts = Boolean(thinking);
}
/*
let legacyOptions = {};
// Filter out any "examples" that are empty
@@ -152,11 +174,11 @@ function getLLMConfig(credentials, options = {}) {
*/
if (reverseProxyUrl) {
llmConfig.baseUrl = reverseProxyUrl;
(llmConfig as GoogleClientOptions).baseUrl = reverseProxyUrl;
}
if (authHeader) {
llmConfig.customHeaders = {
(llmConfig as GoogleClientOptions).customHeaders = {
Authorization: `Bearer ${apiKey}`,
};
}
@@ -169,8 +191,3 @@ function getLLMConfig(credentials, options = {}) {
llmConfig,
};
}
module.exports = {
getLLMConfig,
getSafetySettings,
};

View File

@@ -1 +1,2 @@
export * from './google';
export * from './openai';

View File

@@ -1,9 +1,9 @@
import { ErrorTypes, EModelEndpoint, mapModelToAzureConfig } from 'librechat-data-provider';
import type {
LLMConfigOptions,
UserKeyValues,
InitializeOpenAIOptionsParams,
OpenAIOptionsResult,
OpenAIConfigOptions,
InitializeOpenAIOptionsParams,
} from '~/types';
import { createHandleLLMNewToken } from '~/utils/generators';
import { getAzureCredentials } from '~/utils/azure';
@@ -64,7 +64,7 @@ export const initializeOpenAI = async ({
? userValues?.baseURL
: baseURLOptions[endpoint as keyof typeof baseURLOptions];
const clientOptions: LLMConfigOptions = {
const clientOptions: OpenAIConfigOptions = {
proxy: PROXY ?? undefined,
reverseProxyUrl: baseURL || undefined,
streaming: true,
@@ -135,7 +135,7 @@ export const initializeOpenAI = async ({
user: req.user.id,
};
const finalClientOptions: LLMConfigOptions = {
const finalClientOptions: OpenAIConfigOptions = {
...clientOptions,
modelOptions,
};

View File

@@ -13,7 +13,7 @@ import { isEnabled } from '~/utils/common';
*/
export function getOpenAIConfig(
apiKey: string,
options: t.LLMConfigOptions = {},
options: t.OpenAIConfigOptions = {},
endpoint?: string | null,
): t.LLMConfigResult {
const {

View File

@@ -2,6 +2,7 @@
export * from './mcp/manager';
export * from './mcp/oauth';
export * from './mcp/auth';
export * from './mcp/add';
/* Utilities */
export * from './mcp/utils';
export * from './utils';

View File

@@ -0,0 +1,81 @@
import { Request, Response } from 'express';
import { logger } from '@librechat/data-schemas';
/**
 * Express request shape for the add-tool endpoint.
 * NOTE(review): `user` is presumably attached by auth middleware — only
 * `user.id` is read by the handler; confirm against the route setup.
 */
interface CreateToolRequest extends Request {
  body: {
    /** Tool name (required) */
    name: string;
    /** Tool description (required) */
    description: string;
    /** Tool category (required; validated by the handler) */
    type: 'function' | 'code_interpreter' | 'file_search';
    /** Optional arbitrary metadata */
    metadata?: Record<string, unknown>;
  };
  user?: {
    id: string;
  };
}
/**
 * Add a new tool to the system
 * @route POST /agents/tools/add
 * @param {object} req.body - Request body containing tool data
 * @param {string} req.body.name - Tool name
 * @param {string} req.body.description - Tool description
 * @param {string} req.body.type - Tool type (function, code_interpreter, file_search)
 * @param {object} [req.body.metadata] - Optional metadata
 * @returns {object} Created tool object (currently a mock — see TODO below)
 */
export const addTool = async (req: CreateToolRequest, res: Response) => {
  try {
    const { name, description, type, metadata } = req.body;

    // Log the incoming request for development
    logger.info(
      'Add Tool Request:' +
        JSON.stringify({
          name,
          description,
          type,
          metadata,
          userId: req.user?.id,
        }),
    );

    // Validate required fields
    if (!name || !description || !type) {
      return res.status(400).json({
        error: 'Missing required fields: name, description, and type are required',
      });
    }

    // Validate tool type
    const validTypes = ['function', 'code_interpreter', 'file_search'];
    if (!validTypes.includes(type)) {
      return res.status(400).json({
        error: `Invalid tool type. Must be one of: ${validTypes.join(', ')}`,
      });
    }

    // For now, return a mock successful response
    // TODO: Implement actual tool creation logic
    const mockTool = {
      // `substr` is deprecated; `slice(2, 11)` yields the identical 9-char suffix
      id: `tool-${Date.now()}-${Math.random().toString(36).slice(2, 11)}`,
      type,
      function: {
        name,
        description,
      },
      metadata: metadata || {},
      created_at: new Date().toISOString(),
      updated_at: new Date().toISOString(),
    };

    logger.info('Tool created successfully:' + JSON.stringify(mockTool));
    res.status(201).json(mockTool);
  } catch (error) {
    logger.error('Error adding tool:', error);
    res.status(500).json({
      error: 'Internal server error while adding tool',
      message: error instanceof Error ? error.message : 'Unknown error',
    });
  }
};

View File

@@ -7,6 +7,7 @@ const RECOGNIZED_PROVIDERS = new Set([
'xai',
'deepseek',
'ollama',
'bedrock',
]);
const CONTENT_ARRAY_PROVIDERS = new Set(['google', 'anthropic', 'openai']);

View File

@@ -0,0 +1,24 @@
import { z } from 'zod';
import { AuthKeys, googleBaseSchema } from 'librechat-data-provider';
/** Model parameters accepted for Google endpoints, derived from the shared schema. */
export type GoogleParameters = z.infer<typeof googleBaseSchema>;

/**
 * Credential payload for Google endpoints. The service key may arrive as a
 * JSON string (parsed downstream) or be absent when only an API key is used.
 */
export type GoogleCredentials = {
  /** Vertex AI service-account key (may be a JSON string) */
  [AuthKeys.GOOGLE_SERVICE_KEY]?: string;
  /** Generative Language ("Gemini") API key */
  [AuthKeys.GOOGLE_API_KEY]?: string;
};

/**
 * Configuration options for the getGoogleConfig function
 */
export interface GoogleConfigOptions {
  /** Model parameters; `thinking`/`thinkingBudget` are split out by the consumer */
  modelOptions?: Partial<GoogleParameters>;
  /** When set, used as the client `baseUrl` */
  reverseProxyUrl?: string;
  defaultQuery?: Record<string, string | undefined>;
  headers?: Record<string, string>;
  proxy?: string;
  streaming?: boolean;
  /** When truthy, an Authorization bearer header is attached */
  authHeader?: boolean;
  addParams?: Record<string, unknown>;
  dropParams?: string[];
}

View File

@@ -1,5 +1,6 @@
export * from './azure';
export * from './events';
export * from './google';
export * from './mistral';
export * from './openai';
export * from './run';

View File

@@ -9,7 +9,7 @@ export type OpenAIParameters = z.infer<typeof openAISchema>;
/**
* Configuration options for the getOpenAIConfig function
*/
export interface LLMConfigOptions {
export interface OpenAIConfigOptions {
modelOptions?: Partial<OpenAIParameters>;
reverseProxyUrl?: string;
defaultQuery?: Record<string, string | undefined>;

View File

@@ -1,8 +1,9 @@
import type { AgentModelParameters, EModelEndpoint } from 'librechat-data-provider';
import type { Providers } from '@librechat/agents';
import type { AgentModelParameters } from 'librechat-data-provider';
import type { OpenAIConfiguration } from './openai';
export type RunLLMConfig = {
provider: EModelEndpoint;
provider: Providers;
streaming: boolean;
streamUsage: boolean;
usage?: boolean;

View File

@@ -314,4 +314,116 @@ describe('resolveHeaders', () => {
'Dot-Header': 'dot-value',
});
});
// Additional comprehensive tests for all user field placeholders
it('should replace all allowed user field placeholders', () => {
const user = {
id: 'abc',
name: 'Test User',
username: 'testuser',
email: 'me@example.com',
provider: 'google',
role: 'admin',
googleId: 'gid',
facebookId: 'fbid',
openidId: 'oid',
samlId: 'sid',
ldapId: 'lid',
githubId: 'ghid',
discordId: 'dcid',
appleId: 'aid',
emailVerified: true,
twoFactorEnabled: false,
termsAccepted: true,
};
const headers = {
'X-User-ID': '{{LIBRECHAT_USER_ID}}',
'X-User-Name': '{{LIBRECHAT_USER_NAME}}',
'X-User-Username': '{{LIBRECHAT_USER_USERNAME}}',
'X-User-Email': '{{LIBRECHAT_USER_EMAIL}}',
'X-User-Provider': '{{LIBRECHAT_USER_PROVIDER}}',
'X-User-Role': '{{LIBRECHAT_USER_ROLE}}',
'X-User-GoogleId': '{{LIBRECHAT_USER_GOOGLEID}}',
'X-User-FacebookId': '{{LIBRECHAT_USER_FACEBOOKID}}',
'X-User-OpenIdId': '{{LIBRECHAT_USER_OPENIDID}}',
'X-User-SamlId': '{{LIBRECHAT_USER_SAMLID}}',
'X-User-LdapId': '{{LIBRECHAT_USER_LDAPID}}',
'X-User-GithubId': '{{LIBRECHAT_USER_GITHUBID}}',
'X-User-DiscordId': '{{LIBRECHAT_USER_DISCORDID}}',
'X-User-AppleId': '{{LIBRECHAT_USER_APPLEID}}',
'X-User-EmailVerified': '{{LIBRECHAT_USER_EMAILVERIFIED}}',
'X-User-TwoFactorEnabled': '{{LIBRECHAT_USER_TWOFACTORENABLED}}',
'X-User-TermsAccepted': '{{LIBRECHAT_USER_TERMSACCEPTED}}',
};
const result = resolveHeaders(headers, user);
expect(result['X-User-ID']).toBe('abc');
expect(result['X-User-Name']).toBe('Test User');
expect(result['X-User-Username']).toBe('testuser');
expect(result['X-User-Email']).toBe('me@example.com');
expect(result['X-User-Provider']).toBe('google');
expect(result['X-User-Role']).toBe('admin');
expect(result['X-User-GoogleId']).toBe('gid');
expect(result['X-User-FacebookId']).toBe('fbid');
expect(result['X-User-OpenIdId']).toBe('oid');
expect(result['X-User-SamlId']).toBe('sid');
expect(result['X-User-LdapId']).toBe('lid');
expect(result['X-User-GithubId']).toBe('ghid');
expect(result['X-User-DiscordId']).toBe('dcid');
expect(result['X-User-AppleId']).toBe('aid');
expect(result['X-User-EmailVerified']).toBe('true');
expect(result['X-User-TwoFactorEnabled']).toBe('false');
expect(result['X-User-TermsAccepted']).toBe('true');
});
it('should handle multiple placeholders in one value', () => {
const user = { id: 'abc', email: 'me@example.com' };
const headers = {
'X-Multi': 'User: {{LIBRECHAT_USER_ID}}, Env: ${TEST_API_KEY}, Custom: {{MY_CUSTOM}}',
};
const customVars = { MY_CUSTOM: 'custom-value' };
const result = resolveHeaders(headers, user, customVars);
expect(result['X-Multi']).toBe('User: abc, Env: test-api-key-value, Custom: custom-value');
});
it('should leave unknown placeholders unchanged', () => {
const user = { id: 'abc' };
const headers = {
'X-Unknown': '{{SOMETHING_NOT_RECOGNIZED}}',
'X-Known': '{{LIBRECHAT_USER_ID}}',
};
const result = resolveHeaders(headers, user);
expect(result['X-Unknown']).toBe('{{SOMETHING_NOT_RECOGNIZED}}');
expect(result['X-Known']).toBe('abc');
});
it('should handle a mix of all types', () => {
const user = {
id: 'abc',
email: 'me@example.com',
emailVerified: true,
twoFactorEnabled: false,
};
const headers = {
'X-User': '{{LIBRECHAT_USER_ID}}',
'X-Env': '${TEST_API_KEY}',
'X-Custom': '{{MY_CUSTOM}}',
'X-Multi': 'ID: {{LIBRECHAT_USER_ID}}, ENV: ${TEST_API_KEY}, CUSTOM: {{MY_CUSTOM}}',
'X-Unknown': '{{NOT_A_REAL_PLACEHOLDER}}',
'X-Empty': '',
'X-Boolean': '{{LIBRECHAT_USER_EMAILVERIFIED}}',
};
const customVars = { MY_CUSTOM: 'custom-value' };
const result = resolveHeaders(headers, user, customVars);
expect(result['X-User']).toBe('abc');
expect(result['X-Env']).toBe('test-api-key-value');
expect(result['X-Custom']).toBe('custom-value');
expect(result['X-Multi']).toBe('ID: abc, ENV: test-api-key-value, CUSTOM: custom-value');
expect(result['X-Unknown']).toBe('{{NOT_A_REAL_PLACEHOLDER}}');
expect(result['X-Empty']).toBe('');
expect(result['X-Boolean']).toBe('true');
});
});

View File

@@ -14,3 +14,13 @@ export function sendEvent(res: ServerResponse, event: ServerSentEvent): void {
}
res.write(`event: message\ndata: ${JSON.stringify(event)}\n\n`);
}
/**
* Sends error data in Server Sent Events format and ends the response.
* @param res - The server response.
* @param message - The error message.
*/
export function handleError(res: ServerResponse, message: string): void {
res.write(`event: error\ndata: ${JSON.stringify(message)}\n\n`);
res.end();
}

View File

@@ -6,5 +6,8 @@ export * from './events';
export * from './files';
export * from './generators';
export * from './llm';
export * from './math';
export * from './openid';
export * from './tempChatRetention';
export { default as Tokenizer } from './tokenizer';
export * from './yaml';

View File

@@ -5,14 +5,14 @@
* If the input is not a string or contains invalid characters, an error is thrown.
* If the evaluated result is not a number, an error is thrown.
*
* @param {string|number} str - The mathematical expression to evaluate, or a number.
* @param {number} [fallbackValue] - The default value to return if the input is not a string or number, or if the evaluated result is not a number.
* @param str - The mathematical expression to evaluate, or a number.
* @param fallbackValue - The default value to return if the input is not a string or number, or if the evaluated result is not a number.
*
* @returns {number} The result of the evaluated expression or the input number.
* @returns The result of the evaluated expression or the input number.
*
* @throws {Error} Throws an error if the input is not a string or number, contains invalid characters, or does not evaluate to a number.
* @throws Throws an error if the input is not a string or number, contains invalid characters, or does not evaluate to a number.
*/
function math(str, fallbackValue) {
export function math(str: string | number, fallbackValue?: number): number {
const fallback = typeof fallbackValue !== 'undefined' && typeof fallbackValue === 'number';
if (typeof str !== 'string' && typeof str === 'number') {
return str;
@@ -43,5 +43,3 @@ function math(str, fallbackValue) {
return value;
}
module.exports = math;

View File

@@ -0,0 +1,133 @@
import {
MIN_RETENTION_HOURS,
MAX_RETENTION_HOURS,
DEFAULT_RETENTION_HOURS,
getTempChatRetentionHours,
createTempChatExpirationDate,
} from './tempChatRetention';
import type { TCustomConfig } from 'librechat-data-provider';
describe('tempChatRetention', () => {
const originalEnv = process.env;
beforeEach(() => {
jest.resetModules();
process.env = { ...originalEnv };
delete process.env.TEMP_CHAT_RETENTION_HOURS;
});
afterAll(() => {
process.env = originalEnv;
});
describe('getTempChatRetentionHours', () => {
it('should return default retention hours when no config or env var is set', () => {
const result = getTempChatRetentionHours();
expect(result).toBe(DEFAULT_RETENTION_HOURS);
});
it('should use environment variable when set', () => {
process.env.TEMP_CHAT_RETENTION_HOURS = '48';
const result = getTempChatRetentionHours();
expect(result).toBe(48);
});
it('should use config value when set', () => {
const config: Partial<TCustomConfig> = {
interface: {
temporaryChatRetention: 12,
},
};
const result = getTempChatRetentionHours(config);
expect(result).toBe(12);
});
it('should prioritize config over environment variable', () => {
process.env.TEMP_CHAT_RETENTION_HOURS = '48';
const config: Partial<TCustomConfig> = {
interface: {
temporaryChatRetention: 12,
},
};
const result = getTempChatRetentionHours(config);
expect(result).toBe(12);
});
it('should enforce minimum retention period', () => {
const config: Partial<TCustomConfig> = {
interface: {
temporaryChatRetention: 0,
},
};
const result = getTempChatRetentionHours(config);
expect(result).toBe(MIN_RETENTION_HOURS);
});
it('should enforce maximum retention period', () => {
const config: Partial<TCustomConfig> = {
interface: {
temporaryChatRetention: 10000,
},
};
const result = getTempChatRetentionHours(config);
expect(result).toBe(MAX_RETENTION_HOURS);
});
it('should handle invalid environment variable', () => {
process.env.TEMP_CHAT_RETENTION_HOURS = 'invalid';
const result = getTempChatRetentionHours();
expect(result).toBe(DEFAULT_RETENTION_HOURS);
});
it('should handle invalid config value', () => {
const config: Partial<TCustomConfig> = {
interface: {
temporaryChatRetention: 'invalid' as unknown as number,
},
};
const result = getTempChatRetentionHours(config);
expect(result).toBe(DEFAULT_RETENTION_HOURS);
});
});
describe('createTempChatExpirationDate', () => {
it('should create expiration date with default retention period', () => {
const result = createTempChatExpirationDate();
const expectedDate = new Date();
expectedDate.setHours(expectedDate.getHours() + DEFAULT_RETENTION_HOURS);
// Allow for small time differences in test execution
const timeDiff = Math.abs(result.getTime() - expectedDate.getTime());
expect(timeDiff).toBeLessThan(1000); // Less than 1 second difference
});
it('should create expiration date with custom retention period', () => {
const config: Partial<TCustomConfig> = {
interface: {
temporaryChatRetention: 12,
},
};
const result = createTempChatExpirationDate(config);
const expectedDate = new Date();
expectedDate.setHours(expectedDate.getHours() + 12);
// Allow for small time differences in test execution
const timeDiff = Math.abs(result.getTime() - expectedDate.getTime());
expect(timeDiff).toBeLessThan(1000); // Less than 1 second difference
});
it('should return a Date object', () => {
const result = createTempChatExpirationDate();
expect(result).toBeInstanceOf(Date);
});
it('should return a future date', () => {
const now = new Date();
const result = createTempChatExpirationDate();
expect(result.getTime()).toBeGreaterThan(now.getTime());
});
});
});

View File

@@ -0,0 +1,77 @@
import { logger } from '@librechat/data-schemas';
import type { TCustomConfig } from 'librechat-data-provider';
/**
* Default retention period for temporary chats in hours
*/
export const DEFAULT_RETENTION_HOURS = 24 * 30; // 30 days
/**
* Minimum allowed retention period in hours
*/
export const MIN_RETENTION_HOURS = 1;
/**
* Maximum allowed retention period in hours (1 year = 8760 hours)
*/
export const MAX_RETENTION_HOURS = 8760;
/**
* Gets the temporary chat retention period from environment variables or config
* @param config - The custom configuration object
* @returns The retention period in hours
*/
export function getTempChatRetentionHours(config?: Partial<TCustomConfig> | null): number {
let retentionHours = DEFAULT_RETENTION_HOURS;
// Check environment variable first
if (process.env.TEMP_CHAT_RETENTION_HOURS) {
const envValue = parseInt(process.env.TEMP_CHAT_RETENTION_HOURS, 10);
if (!isNaN(envValue)) {
retentionHours = envValue;
} else {
logger.warn(
`Invalid TEMP_CHAT_RETENTION_HOURS environment variable: ${process.env.TEMP_CHAT_RETENTION_HOURS}. Using default: ${DEFAULT_RETENTION_HOURS} hours.`,
);
}
}
// Check config file (takes precedence over environment variable)
if (config?.interface?.temporaryChatRetention !== undefined) {
const configValue = config.interface.temporaryChatRetention;
if (typeof configValue === 'number' && !isNaN(configValue)) {
retentionHours = configValue;
} else {
logger.warn(
`Invalid temporaryChatRetention in config: ${configValue}. Using ${retentionHours} hours.`,
);
}
}
// Validate the retention period
if (retentionHours < MIN_RETENTION_HOURS) {
logger.warn(
`Temporary chat retention period ${retentionHours} is below minimum ${MIN_RETENTION_HOURS} hours. Using minimum value.`,
);
retentionHours = MIN_RETENTION_HOURS;
} else if (retentionHours > MAX_RETENTION_HOURS) {
logger.warn(
`Temporary chat retention period ${retentionHours} exceeds maximum ${MAX_RETENTION_HOURS} hours. Using maximum value.`,
);
retentionHours = MAX_RETENTION_HOURS;
}
return retentionHours;
}
/**
* Creates an expiration date for temporary chats
* @param config - The custom configuration object
* @returns The expiration date
*/
export function createTempChatExpirationDate(config?: Partial<TCustomConfig>): Date {
const retentionHours = getTempChatRetentionHours(config);
const expiredAt = new Date();
expiredAt.setHours(expiredAt.getHours() + retentionHours);
return expiredAt;
}

View File

@@ -0,0 +1,11 @@
import fs from 'fs';
import yaml from 'js-yaml';
export function loadYaml(filepath: string) {
try {
const fileContents = fs.readFileSync(filepath, 'utf8');
return yaml.load(fileContents);
} catch (e) {
return e;
}
}

View File

@@ -510,6 +510,7 @@ export const intefaceSchema = z
prompts: z.boolean().optional(),
agents: z.boolean().optional(),
temporaryChat: z.boolean().optional(),
temporaryChatRetention: z.number().min(1).max(8760).optional(),
runCode: z.boolean().optional(),
webSearch: z.boolean().optional(),
})

View File

@@ -312,6 +312,30 @@ export const getToolCalls = (params: q.GetToolCallParams): Promise<q.ToolCallRes
);
};
export const createTool = (toolData: {
name: string;
description: string;
type: 'function' | 'code_interpreter' | 'file_search';
metadata?: Record<string, unknown>;
}): Promise<{
id: string;
type: string;
function: {
name: string;
description: string;
};
metadata: Record<string, unknown>;
created_at: string;
updated_at: string;
}> => {
return request.post(
endpoints.agents({
path: 'tools/add',
}),
toolData,
);
};
/* Files */
export const getFiles = (): Promise<f.TFile[]> => {

View File

@@ -48,6 +48,7 @@ export enum QueryKeys {
banner = 'banner',
/* Memories */
memories = 'memories',
mcpTools = 'mcpTools',
}
export enum MutationKeys {

View File

@@ -450,6 +450,37 @@ const google: Record<string, SettingDefinition> = {
optionType: 'model',
columnSpan: 2,
},
thinking: {
key: 'thinking',
label: 'com_endpoint_thinking',
labelCode: true,
description: 'com_endpoint_google_thinking',
descriptionCode: true,
type: 'boolean',
default: googleSettings.thinking.default,
component: 'switch',
optionType: 'conversation',
showDefault: false,
columnSpan: 2,
},
thinkingBudget: {
key: 'thinkingBudget',
label: 'com_endpoint_thinking_budget',
labelCode: true,
description: 'com_endpoint_google_thinking_budget',
descriptionCode: true,
placeholder: 'com_ui_auto',
placeholderCode: true,
type: 'number',
component: 'input',
range: {
min: googleSettings.thinkingBudget.min,
max: googleSettings.thinkingBudget.max,
step: googleSettings.thinkingBudget.step,
},
optionType: 'conversation',
columnSpan: 2,
},
};
const googleConfig: SettingsConfiguration = [
@@ -461,6 +492,8 @@ const googleConfig: SettingsConfiguration = [
google.topP,
google.topK,
librechat.resendFiles,
google.thinking,
google.thinkingBudget,
];
const googleCol1: SettingsConfiguration = [
@@ -476,6 +509,8 @@ const googleCol2: SettingsConfiguration = [
google.topP,
google.topK,
librechat.resendFiles,
google.thinking,
google.thinkingBudget,
];
const openAI: SettingsConfiguration = [

View File

@@ -255,6 +255,18 @@ export const googleSettings = {
step: 1 as const,
default: 40 as const,
},
thinking: {
default: true as const,
},
thinkingBudget: {
min: -1 as const,
max: 32768 as const,
step: 1 as const,
/** `-1` = Dynamic Thinking, meaning the model will adjust
* the budget based on the complexity of the request.
*/
default: -1 as const,
},
};
const ANTHROPIC_MAX_OUTPUT = 128000 as const;
@@ -785,6 +797,8 @@ export const googleBaseSchema = tConversationSchema.pick({
artifacts: true,
topP: true,
topK: true,
thinking: true,
thinkingBudget: true,
iconURL: true,
greeting: true,
spec: true,
@@ -810,6 +824,12 @@ export const googleGenConfigSchema = z
presencePenalty: coerceNumber.optional(),
frequencyPenalty: coerceNumber.optional(),
stopSequences: z.array(z.string()).optional(),
thinkingConfig: z
.object({
includeThoughts: z.boolean().optional(),
thinkingBudget: coerceNumber.optional(),
})
.optional(),
})
.strip()
.optional();

View File

@@ -11,8 +11,6 @@ const formatDate = (date: Date): string => {
// Factory function that takes mongoose instance and returns the methods
export function createMemoryMethods(mongoose: typeof import('mongoose')) {
const MemoryEntry = mongoose.models.MemoryEntry;
/**
* Creates a new memory entry for a user
* Throws an error if a memory with the same key already exists
@@ -28,6 +26,7 @@ export function createMemoryMethods(mongoose: typeof import('mongoose')) {
return { ok: false };
}
const MemoryEntry = mongoose.models.MemoryEntry;
const existingMemory = await MemoryEntry.findOne({ userId, key });
if (existingMemory) {
throw new Error('Memory with this key already exists');
@@ -63,6 +62,7 @@ export function createMemoryMethods(mongoose: typeof import('mongoose')) {
return { ok: false };
}
const MemoryEntry = mongoose.models.MemoryEntry;
await MemoryEntry.findOneAndUpdate(
{ userId, key },
{
@@ -89,6 +89,7 @@ export function createMemoryMethods(mongoose: typeof import('mongoose')) {
*/
async function deleteMemory({ userId, key }: t.DeleteMemoryParams): Promise<t.MemoryResult> {
try {
const MemoryEntry = mongoose.models.MemoryEntry;
const result = await MemoryEntry.findOneAndDelete({ userId, key });
return { ok: !!result };
} catch (error) {
@@ -105,6 +106,7 @@ export function createMemoryMethods(mongoose: typeof import('mongoose')) {
userId: string | Types.ObjectId,
): Promise<t.IMemoryEntryLean[]> {
try {
const MemoryEntry = mongoose.models.MemoryEntry;
return (await MemoryEntry.find({ userId }).lean()) as t.IMemoryEntryLean[];
} catch (error) {
throw new Error(

View File

@@ -1,16 +1,14 @@
import type { DeleteResult, Model } from 'mongoose';
import type { IPluginAuth } from '~/schema/pluginAuth';
import type {
FindPluginAuthsByKeysParams,
UpdatePluginAuthParams,
DeletePluginAuthParams,
FindPluginAuthParams,
IPluginAuth,
} from '~/types';
// Factory function that takes mongoose instance and returns the methods
export function createPluginAuthMethods(mongoose: typeof import('mongoose')) {
const PluginAuth: Model<IPluginAuth> = mongoose.models.PluginAuth;
/**
* Finds a single plugin auth entry by userId and authField
*/
@@ -19,6 +17,7 @@ export function createPluginAuthMethods(mongoose: typeof import('mongoose')) {
authField,
}: FindPluginAuthParams): Promise<IPluginAuth | null> {
try {
const PluginAuth: Model<IPluginAuth> = mongoose.models.PluginAuth;
return await PluginAuth.findOne({ userId, authField }).lean();
} catch (error) {
throw new Error(
@@ -39,6 +38,7 @@ export function createPluginAuthMethods(mongoose: typeof import('mongoose')) {
return [];
}
const PluginAuth: Model<IPluginAuth> = mongoose.models.PluginAuth;
return await PluginAuth.find({
userId,
pluginKey: { $in: pluginKeys },
@@ -60,6 +60,7 @@ export function createPluginAuthMethods(mongoose: typeof import('mongoose')) {
value,
}: UpdatePluginAuthParams): Promise<IPluginAuth> {
try {
const PluginAuth: Model<IPluginAuth> = mongoose.models.PluginAuth;
const existingAuth = await PluginAuth.findOne({ userId, pluginKey, authField }).lean();
if (existingAuth) {
@@ -95,6 +96,7 @@ export function createPluginAuthMethods(mongoose: typeof import('mongoose')) {
all = false,
}: DeletePluginAuthParams): Promise<DeleteResult> {
try {
const PluginAuth: Model<IPluginAuth> = mongoose.models.PluginAuth;
if (all) {
const filter: DeletePluginAuthParams = { userId };
if (pluginKey) {
@@ -120,6 +122,7 @@ export function createPluginAuthMethods(mongoose: typeof import('mongoose')) {
*/
async function deleteAllUserPluginAuths(userId: string): Promise<DeleteResult> {
try {
const PluginAuth: Model<IPluginAuth> = mongoose.models.PluginAuth;
return await PluginAuth.deleteMany({ userId });
} catch (error) {
throw new Error(

View File

@@ -13,14 +13,10 @@ export class SessionError extends Error {
}
const { REFRESH_TOKEN_EXPIRY } = process.env ?? {};
const expires = REFRESH_TOKEN_EXPIRY
? eval(REFRESH_TOKEN_EXPIRY)
: 1000 * 60 * 60 * 24 * 7; // 7 days default
const expires = REFRESH_TOKEN_EXPIRY ? eval(REFRESH_TOKEN_EXPIRY) : 1000 * 60 * 60 * 24 * 7; // 7 days default
// Factory function that takes mongoose instance and returns the methods
export function createSessionMethods(mongoose: typeof import('mongoose')) {
const Session = mongoose.models.Session;
/**
* Creates a new session for a user
*/
@@ -33,13 +29,14 @@ export function createSessionMethods(mongoose: typeof import('mongoose')) {
}
try {
const session = new Session({
const Session = mongoose.models.Session;
const currentSession = new Session({
user: userId,
expiration: options.expiration || new Date(Date.now() + expires),
});
const refreshToken = await generateRefreshToken(session);
const refreshToken = await generateRefreshToken(currentSession);
return { session, refreshToken };
return { session: currentSession, refreshToken };
} catch (error) {
logger.error('[createSession] Error creating session:', error);
throw new SessionError('Failed to create session', 'CREATE_SESSION_FAILED');
@@ -54,6 +51,7 @@ export function createSessionMethods(mongoose: typeof import('mongoose')) {
options: t.SessionQueryOptions = { lean: true },
): Promise<t.ISession | null> {
try {
const Session = mongoose.models.Session;
const query: Record<string, unknown> = {};
if (!params.refreshToken && !params.userId && !params.sessionId) {
@@ -109,6 +107,7 @@ export function createSessionMethods(mongoose: typeof import('mongoose')) {
newExpiration?: Date,
): Promise<t.ISession> {
try {
const Session = mongoose.models.Session;
const sessionDoc = typeof session === 'string' ? await Session.findById(session) : session;
if (!sessionDoc) {
@@ -128,6 +127,7 @@ export function createSessionMethods(mongoose: typeof import('mongoose')) {
*/
async function deleteSession(params: t.DeleteSessionParams): Promise<{ deletedCount?: number }> {
try {
const Session = mongoose.models.Session;
if (!params.refreshToken && !params.sessionId) {
throw new SessionError(
'Either refreshToken or sessionId is required',
@@ -166,6 +166,7 @@ export function createSessionMethods(mongoose: typeof import('mongoose')) {
options: t.DeleteAllSessionsOptions = {},
): Promise<{ deletedCount?: number }> {
try {
const Session = mongoose.models.Session;
if (!userId) {
throw new SessionError('User ID is required', 'INVALID_USER_ID');
}
@@ -237,6 +238,7 @@ export function createSessionMethods(mongoose: typeof import('mongoose')) {
*/
async function countActiveSessions(userId: string): Promise<number> {
try {
const Session = mongoose.models.Session;
if (!userId) {
throw new SessionError('User ID is required', 'INVALID_USER_ID');
}