Compare commits
73 Commits
feat/agent...feat/admin
| Author | SHA1 | Date |
|---|---|---|
| | 278590d0bb | |
| | 41a4674469 | |
| | e7a9cf88ac | |
| | f6925f906b | |
| | e90fd1df15 | |
| | a1f9f3dd39 | |
| | fbe0def2fa | |
| | d04da60b3b | |
| | 0e94d97bfb | |
| | 45ab4d4503 | |
| | 0ceef12eea | |
| | 6738360051 | |
| | 52b65492d5 | |
| | 7a9a99d2a0 | |
| | 5bfb06b417 | |
| | 2ce8f1f686 | |
| | 1a47601533 | |
| | 5245aeea8f | |
| | dd93db40bc | |
| | 136cf1d5a8 | |
| | 751522087a | |
| | 7fe830acfc | |
| | cdfe686987 | |
| | 5b5723343c | |
| | 30c24a66f6 | |
| | ecf9733bc1 | |
| | 133312fb40 | |
| | b62ffb533c | |
| | d75fb76338 | |
| | 51f2d43fed | |
| | e3a645e8fb | |
| | 180046a3c5 | |
| | 916742ab9d | |
| | d91f34dd42 | |
| | 5676976564 | |
| | 85aa3e7d9c | |
| | a2ff6613c5 | |
| | 8d6cb5eee0 | |
| | 31445e391a | |
| | 04c3a5a861 | |
| | 5667cc9702 | |
| | c0f95f971a | |
| | f125f5bd32 | |
| | f3eca8c7a7 | |
| | f22e5f965e | |
| | 749f539dfc | |
| | 1247207afe | |
| | 5c0e9d8fbb | |
| | 957fa7a994 | |
| | 751c2e1d17 | |
| | 519645c0b0 | |
| | 0d0a318c3c | |
| | 588e0c4611 | |
| | 79144a6365 | |
| | ca53c20370 | |
| | d635503f49 | |
| | 920966f895 | |
| | c46e0d3ecc | |
| | c6ecf0095b | |
| | 7de6f6e44c | |
| | 035f85c3ba | |
| | 6f6a34d126 | |
| | fff1f1cf27 | |
| | 1869854d70 | |
| | 4dd2998592 | |
| | a4a174b3dc | |
| | 65c83317aa | |
| | e95e0052da | |
| | 0ecafcd38e | |
| | cadfe14abe | |
| | 75dd6fb28b | |
| | eef93024d5 | |
| | cd73cb0b3e | |
@@ -690,8 +690,8 @@ HELP_AND_FAQ_URL=https://librechat.ai
# REDIS_PING_INTERVAL=300

# Force specific cache namespaces to use in-memory storage even when Redis is enabled
# Comma-separated list of CacheKeys (e.g., STATIC_CONFIG,ROLES,MESSAGES)
# FORCED_IN_MEMORY_CACHE_NAMESPACES=STATIC_CONFIG,ROLES
# Comma-separated list of CacheKeys (e.g., ROLES,MESSAGES)
# FORCED_IN_MEMORY_CACHE_NAMESPACES=ROLES,MESSAGES

#==================================================#
# Others #
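Note: a minimal sketch of how a comma-separated `FORCED_IN_MEMORY_CACHE_NAMESPACES` value could be parsed into cache keys; the real logic lives in `api/cache/cacheConfig.js`, and the unknown-key filtering shown here is an assumption.

```js
const { CacheKeys } = require('librechat-data-provider');

// Sketch only: trims whitespace and keeps recognized CacheKeys (the filter step is an assumption).
function parseForcedInMemoryNamespaces(raw = process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES) {
  if (!raw) {
    return [];
  }
  return raw
    .split(',')
    .map((key) => key.trim())
    .filter((key) => Object.values(CacheKeys).includes(key));
}

// e.g. FORCED_IN_MEMORY_CACHE_NAMESPACES=' ROLES, MESSAGES ' -> ['ROLES', 'MESSAGES']
```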
@@ -1,4 +1,4 @@
# v0.8.0-rc3
# v0.8.0-rc4

# Base node image
FROM node:20-alpine AS node
@@ -30,7 +30,7 @@ RUN \
    # Allow mounting of these files, which have no default
    touch .env ; \
    # Create directories for the volumes to inherit the correct permissions
    mkdir -p /app/client/public/images /app/api/logs ; \
    mkdir -p /app/client/public/images /app/api/logs /app/uploads ; \
    npm config set fetch-retry-maxtimeout 600000 ; \
    npm config set fetch-retries 5 ; \
    npm config set fetch-retry-mintimeout 15000 ; \
@@ -44,8 +44,6 @@ RUN \
    npm prune --production; \
    npm cache clean --force

RUN mkdir -p /app/client/public/images /app/api/logs

# Node API setup
EXPOSE 3080
ENV HOST=0.0.0.0
@@ -1,5 +1,5 @@
# Dockerfile.multi
# v0.8.0-rc3
# v0.8.0-rc4

# Base for all builds
FROM node:20-alpine AS base-min
@@ -75,6 +75,7 @@
- 🔍 **Web Search**:
  - Search the internet and retrieve relevant information to enhance your AI context
  - Combines search providers, content scrapers, and result rerankers for optimal results
  - **Customizable Jina Reranking**: Configure custom Jina API URLs for reranking services
  - **[Learn More →](https://www.librechat.ai/docs/features/web_search)**

- 🪄 **Generative UI with Code Artifacts**:
@@ -10,7 +10,17 @@ const {
  validateVisionModel,
} = require('librechat-data-provider');
const { SplitStreamHandler: _Handler } = require('@librechat/agents');
const { Tokenizer, createFetch, createStreamEventHandlers } = require('@librechat/api');
const {
  Tokenizer,
  createFetch,
  matchModelName,
  getClaudeHeaders,
  getModelMaxTokens,
  configureReasoning,
  checkPromptCacheSupport,
  getModelMaxOutputTokens,
  createStreamEventHandlers,
} = require('@librechat/api');
const {
  truncateText,
  formatMessage,
@@ -19,12 +29,6 @@ const {
  parseParamFromPrompt,
  createContextHandlers,
} = require('./prompts');
const {
  getClaudeHeaders,
  configureReasoning,
  checkPromptCacheSupport,
} = require('~/server/services/Endpoints/anthropic/helpers');
const { getModelMaxTokens, getModelMaxOutputTokens, matchModelName } = require('~/utils');
const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
const { sleep } = require('~/server/utils');
@@ -1,4 +1,5 @@
const { google } = require('googleapis');
const { getModelMaxTokens } = require('@librechat/api');
const { concat } = require('@langchain/core/utils/stream');
const { ChatVertexAI } = require('@langchain/google-vertexai');
const { Tokenizer, getSafetySettings } = require('@librechat/api');
@@ -21,7 +22,6 @@ const {
} = require('librechat-data-provider');
const { encodeAndFormat } = require('~/server/services/Files/images');
const { spendTokens } = require('~/models/spendTokens');
const { getModelMaxTokens } = require('~/utils');
const { sleep } = require('~/server/utils');
const { logger } = require('~/config');
const {
@@ -7,7 +7,9 @@ const {
  createFetch,
  resolveHeaders,
  constructAzureURL,
  getModelMaxTokens,
  genAzureChatCompletion,
  getModelMaxOutputTokens,
  createStreamEventHandlers,
} = require('@librechat/api');
const {
@@ -31,13 +33,13 @@ const {
  titleInstruction,
  createContextHandlers,
} = require('./prompts');
const { extractBaseURL, getModelMaxTokens, getModelMaxOutputTokens } = require('~/utils');
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
const { addSpaceIfNeeded, sleep } = require('~/server/utils');
const { spendTokens } = require('~/models/spendTokens');
const { handleOpenAIErrors } = require('./tools/util');
const { summaryBuffer } = require('./memory');
const { runTitleChain } = require('./chains');
const { extractBaseURL } = require('~/utils');
const { tokenSplit } = require('./document');
const BaseClient = require('./BaseClient');
const { createLLM } = require('./llm');
@@ -1,5 +1,5 @@
const { getModelMaxTokens } = require('@librechat/api');
const BaseClient = require('../BaseClient');
const { getModelMaxTokens } = require('../../../utils');

class FakeClient extends BaseClient {
  constructor(apiKey, options = {}) {
@@ -71,9 +71,10 @@ const primeFiles = async (options) => {
 * @param {ServerRequest} options.req
 * @param {Array<{ file_id: string; filename: string }>} options.files
 * @param {string} [options.entity_id]
 * @param {boolean} [options.fileCitations=false] - Whether to include citation instructions
 * @returns
 */
const createFileSearchTool = async ({ req, files, entity_id }) => {
const createFileSearchTool = async ({ req, files, entity_id, fileCitations = false }) => {
  return tool(
    async ({ query }) => {
      if (files.length === 0) {
@@ -142,9 +143,9 @@ const createFileSearchTool = async ({ req, files, entity_id }) => {
      const formattedString = formattedResults
        .map(
          (result, index) =>
            `File: ${result.filename}\nAnchor: \\ue202turn0file${index} (${result.filename})\nRelevance: ${(1.0 - result.distance).toFixed(4)}\nContent: ${
              result.content
            }\n`,
            `File: ${result.filename}${
              fileCitations ? `\nAnchor: \\ue202turn0file${index} (${result.filename})` : ''
            }\nRelevance: ${(1.0 - result.distance).toFixed(4)}\nContent: ${result.content}\n`,
        )
        .join('\n---\n');

@@ -158,12 +159,14 @@ const createFileSearchTool = async ({ req, files, entity_id }) => {
        pageRelevance: result.page ? { [result.page]: 1.0 - result.distance } : {},
      }));

      return [formattedString, { [Tools.file_search]: { sources } }];
      return [formattedString, { [Tools.file_search]: { sources, fileCitations } }];
    },
    {
      name: Tools.file_search,
      responseFormat: 'content_and_artifact',
      description: `Performs semantic search across attached "${Tools.file_search}" documents using natural language queries. This tool analyzes the content of uploaded files to find relevant information, quotes, and passages that best match your query. Use this to extract specific information or find relevant sections within the available documents.
      description: `Performs semantic search across attached "${Tools.file_search}" documents using natural language queries. This tool analyzes the content of uploaded files to find relevant information, quotes, and passages that best match your query. Use this to extract specific information or find relevant sections within the available documents.${
        fileCitations
          ? `

**CITE FILE SEARCH RESULTS:**
Use anchor markers immediately after statements derived from file content. Reference the filename in your text:
@@ -171,7 +174,9 @@ Use anchor markers immediately after statements derived from file content. Refer
- Page reference: "According to report.docx... \\ue202turn0file1"
- Multi-file: "Multiple sources confirm... \\ue200\\ue202turn0file0\\ue202turn0file1\\ue201"

**ALWAYS mention the filename in your text before the citation marker. NEVER use markdown links or footnotes.**`,
**ALWAYS mention the filename in your text before the citation marker. NEVER use markdown links or footnotes.**`
          : ''
      }`,
      schema: z.object({
        query: z
          .string()
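Note: a rough usage sketch of the new `fileCitations` flag; `req`, `files`, and `agent` stand in for the values the real caller (`handleTools`) already has in scope.

```js
const fileSearchTool = await createFileSearchTool({
  req,
  files,
  entity_id: agent?.id,
  fileCitations: true, // adds anchor markers and the citation instructions to the tool description
});

// Inside the tool handler, the returned artifact now carries the flag alongside the sources:
// [formattedString, { [Tools.file_search]: { sources, fileCitations: true } }]
```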
@@ -1,9 +1,16 @@
const { logger } = require('@librechat/data-schemas');
const { SerpAPI } = require('@langchain/community/tools/serpapi');
const { Calculator } = require('@langchain/community/tools/calculator');
const { mcpToolPattern, loadWebSearchAuth } = require('@librechat/api');
const { mcpToolPattern, loadWebSearchAuth, checkAccess } = require('@librechat/api');
const { EnvVar, createCodeExecutionTool, createSearchTool } = require('@librechat/agents');
const { Tools, Constants, EToolResources, replaceSpecialVars } = require('librechat-data-provider');
const {
  Tools,
  Constants,
  Permissions,
  EToolResources,
  PermissionTypes,
  replaceSpecialVars,
} = require('librechat-data-provider');
const {
  availableTools,
  manifestToolMap,
@@ -27,6 +34,7 @@ const { getUserPluginAuthValue } = require('~/server/services/PluginService');
const { createMCPTool, createMCPTools } = require('~/server/services/MCP');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { getCachedTools } = require('~/server/services/Config');
const { getRoleByName } = require('~/models/Role');

/**
 * Validates the availability and authentication of tools for a user based on environment variables or user-specific plugin authentication values.
@@ -281,7 +289,29 @@ const loadTools = async ({
        if (toolContext) {
          toolContextMap[tool] = toolContext;
        }
        return createFileSearchTool({ req: options.req, files, entity_id: agent?.id });

        /** @type {boolean | undefined} Check if user has FILE_CITATIONS permission */
        let fileCitations;
        if (fileCitations == null && options.req?.user != null) {
          try {
            fileCitations = await checkAccess({
              user: options.req.user,
              permissionType: PermissionTypes.FILE_CITATIONS,
              permissions: [Permissions.USE],
              getRoleByName,
            });
          } catch (error) {
            logger.error('[handleTools] FILE_CITATIONS permission check failed:', error);
            fileCitations = false;
          }
        }

        return createFileSearchTool({
          req: options.req,
          files,
          entity_id: agent?.id,
          fileCitations,
        });
      };
      continue;
    } else if (tool === Tools.web_search) {
3
api/cache/cacheConfig.spec.js
vendored
@@ -157,12 +157,11 @@ describe('cacheConfig', () => {

  describe('FORCED_IN_MEMORY_CACHE_NAMESPACES validation', () => {
    test('should parse comma-separated cache keys correctly', () => {
      process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = ' ROLES, STATIC_CONFIG ,MESSAGES ';
      process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = ' ROLES, MESSAGES ';

      const { cacheConfig } = require('./cacheConfig');
      expect(cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES).toEqual([
        'ROLES',
        'STATIC_CONFIG',
        'MESSAGES',
      ]);
    });
2
api/cache/getLogStores.js
vendored
@@ -31,8 +31,8 @@ const namespaces = {
  [CacheKeys.SAML_SESSION]: sessionCache(CacheKeys.SAML_SESSION),

  [CacheKeys.ROLES]: standardCache(CacheKeys.ROLES),
  [CacheKeys.APP_CONFIG]: standardCache(CacheKeys.APP_CONFIG),
  [CacheKeys.CONFIG_STORE]: standardCache(CacheKeys.CONFIG_STORE),
  [CacheKeys.STATIC_CONFIG]: standardCache(CacheKeys.STATIC_CONFIG),
  [CacheKeys.PENDING_REQ]: standardCache(CacheKeys.PENDING_REQ),
  [CacheKeys.ENCODED_DOMAINS]: new Keyv({ store: keyvMongo, namespace: CacheKeys.ENCODED_DOMAINS }),
  [CacheKeys.ABORT_KEYS]: standardCache(CacheKeys.ABORT_KEYS, Time.TEN_MINUTES),
@@ -1,6 +1,6 @@
const { MCPManager, FlowStateManager } = require('@librechat/api');
const { EventSource } = require('eventsource');
const { Time } = require('librechat-data-provider');
const { MCPManager, FlowStateManager, OAuthReconnectionManager } = require('@librechat/api');
const logger = require('./winston');

global.EventSource = EventSource;
@@ -26,4 +26,6 @@ module.exports = {
  createMCPManager: MCPManager.createInstance,
  getMCPManager: MCPManager.getInstance,
  getFlowStateManager,
  createOAuthReconnectionManager: OAuthReconnectionManager.createInstance,
  getOAuthReconnectionManager: OAuthReconnectionManager.getInstance,
};
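Note: the new exports are consumed elsewhere in this changeset; a short sketch mirroring the AuthController hunk below (`userId` is illustrative):

```js
const { getOAuthReconnectionManager } = require('~/config');

// Best-effort reconnection of OAuth MCP servers after a successful token refresh
void getOAuthReconnectionManager()
  .reconnectServers(userId)
  .catch((err) => logger.error('Error reconnecting OAuth MCP servers:', err));
```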
@@ -189,11 +189,15 @@ async function createAutoRefillTransaction(txData) {
 * @param {txData} _txData - Transaction data.
 */
async function createTransaction(_txData) {
  const { balance, ...txData } = _txData;
  const { balance, transactions, ...txData } = _txData;
  if (txData.rawAmount != null && isNaN(txData.rawAmount)) {
    return;
  }

  if (transactions?.enabled === false) {
    return;
  }

  const transaction = new Transaction(txData);
  transaction.endpointTokenConfig = txData.endpointTokenConfig;
  calculateTokenValue(transaction);
@@ -222,7 +226,11 @@ async function createTransaction(_txData) {
 * @param {txData} _txData - Transaction data.
 */
async function createStructuredTransaction(_txData) {
  const { balance, ...txData } = _txData;
  const { balance, transactions, ...txData } = _txData;
  if (transactions?.enabled === false) {
    return;
  }

  const transaction = new Transaction({
    ...txData,
    endpointTokenConfig: txData.endpointTokenConfig,
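Note: a minimal sketch of the new opt-out, assuming the caller threads the app-level `transactions` config into the transaction data (field names follow this hunk; surrounding values are illustrative):

```js
const result = await createTransaction({
  user: userId,
  conversationId,
  model: 'gpt-4o',
  context: 'message',
  tokenType: 'prompt',
  rawAmount: -100,
  balance: { enabled: true }, // balance updates are still governed separately
  transactions: { enabled: false }, // createTransaction returns early and saves nothing
});
// result === undefined when transactions are disabled
```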
@@ -1,10 +1,9 @@
const mongoose = require('mongoose');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { spendTokens, spendStructuredTokens } = require('./spendTokens');

const { getMultiplier, getCacheMultiplier } = require('./tx');
const { createTransaction } = require('./Transaction');
const { Balance } = require('~/db/models');
const { createTransaction, createStructuredTransaction } = require('./Transaction');
const { Balance, Transaction } = require('~/db/models');

let mongoServer;
beforeAll(async () => {
@@ -380,3 +379,188 @@ describe('NaN Handling Tests', () => {
|
||||
expect(balance.tokenCredits).toBe(initialBalance);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Transactions Config Tests', () => {
|
||||
test('createTransaction should not save when transactions.enabled is false', async () => {
|
||||
// Arrange
|
||||
const userId = new mongoose.Types.ObjectId();
|
||||
const initialBalance = 10000000;
|
||||
await Balance.create({ user: userId, tokenCredits: initialBalance });
|
||||
|
||||
const model = 'gpt-3.5-turbo';
|
||||
const txData = {
|
||||
user: userId,
|
||||
conversationId: 'test-conversation-id',
|
||||
model,
|
||||
context: 'test',
|
||||
endpointTokenConfig: null,
|
||||
rawAmount: -100,
|
||||
tokenType: 'prompt',
|
||||
transactions: { enabled: false },
|
||||
};
|
||||
|
||||
// Act
|
||||
const result = await createTransaction(txData);
|
||||
|
||||
// Assert: No transaction should be created
|
||||
expect(result).toBeUndefined();
|
||||
const transactions = await Transaction.find({ user: userId });
|
||||
expect(transactions).toHaveLength(0);
|
||||
const balance = await Balance.findOne({ user: userId });
|
||||
expect(balance.tokenCredits).toBe(initialBalance);
|
||||
});
|
||||
|
||||
test('createTransaction should save when transactions.enabled is true', async () => {
|
||||
// Arrange
|
||||
const userId = new mongoose.Types.ObjectId();
|
||||
const initialBalance = 10000000;
|
||||
await Balance.create({ user: userId, tokenCredits: initialBalance });
|
||||
|
||||
const model = 'gpt-3.5-turbo';
|
||||
const txData = {
|
||||
user: userId,
|
||||
conversationId: 'test-conversation-id',
|
||||
model,
|
||||
context: 'test',
|
||||
endpointTokenConfig: null,
|
||||
rawAmount: -100,
|
||||
tokenType: 'prompt',
|
||||
transactions: { enabled: true },
|
||||
balance: { enabled: true },
|
||||
};
|
||||
|
||||
// Act
|
||||
const result = await createTransaction(txData);
|
||||
|
||||
// Assert: Transaction should be created
|
||||
expect(result).toBeDefined();
|
||||
expect(result.balance).toBeLessThan(initialBalance);
|
||||
const transactions = await Transaction.find({ user: userId });
|
||||
expect(transactions).toHaveLength(1);
|
||||
expect(transactions[0].rawAmount).toBe(-100);
|
||||
});
|
||||
|
||||
test('createTransaction should save when balance.enabled is true even if transactions config is missing', async () => {
|
||||
// Arrange
|
||||
const userId = new mongoose.Types.ObjectId();
|
||||
const initialBalance = 10000000;
|
||||
await Balance.create({ user: userId, tokenCredits: initialBalance });
|
||||
|
||||
const model = 'gpt-3.5-turbo';
|
||||
const txData = {
|
||||
user: userId,
|
||||
conversationId: 'test-conversation-id',
|
||||
model,
|
||||
context: 'test',
|
||||
endpointTokenConfig: null,
|
||||
rawAmount: -100,
|
||||
tokenType: 'prompt',
|
||||
balance: { enabled: true },
|
||||
// No transactions config provided
|
||||
};
|
||||
|
||||
// Act
|
||||
const result = await createTransaction(txData);
|
||||
|
||||
// Assert: Transaction should be created (backward compatibility)
|
||||
expect(result).toBeDefined();
|
||||
expect(result.balance).toBeLessThan(initialBalance);
|
||||
const transactions = await Transaction.find({ user: userId });
|
||||
expect(transactions).toHaveLength(1);
|
||||
});
|
||||
|
||||
test('createTransaction should save transaction but not update balance when balance is disabled but transactions enabled', async () => {
|
||||
// Arrange
|
||||
const userId = new mongoose.Types.ObjectId();
|
||||
const initialBalance = 10000000;
|
||||
await Balance.create({ user: userId, tokenCredits: initialBalance });
|
||||
|
||||
const model = 'gpt-3.5-turbo';
|
||||
const txData = {
|
||||
user: userId,
|
||||
conversationId: 'test-conversation-id',
|
||||
model,
|
||||
context: 'test',
|
||||
endpointTokenConfig: null,
|
||||
rawAmount: -100,
|
||||
tokenType: 'prompt',
|
||||
transactions: { enabled: true },
|
||||
balance: { enabled: false },
|
||||
};
|
||||
|
||||
// Act
|
||||
const result = await createTransaction(txData);
|
||||
|
||||
// Assert: Transaction should be created but balance unchanged
|
||||
expect(result).toBeUndefined();
|
||||
const transactions = await Transaction.find({ user: userId });
|
||||
expect(transactions).toHaveLength(1);
|
||||
expect(transactions[0].rawAmount).toBe(-100);
|
||||
const balance = await Balance.findOne({ user: userId });
|
||||
expect(balance.tokenCredits).toBe(initialBalance);
|
||||
});
|
||||
|
||||
test('createStructuredTransaction should not save when transactions.enabled is false', async () => {
|
||||
// Arrange
|
||||
const userId = new mongoose.Types.ObjectId();
|
||||
const initialBalance = 10000000;
|
||||
await Balance.create({ user: userId, tokenCredits: initialBalance });
|
||||
|
||||
const model = 'claude-3-5-sonnet';
|
||||
const txData = {
|
||||
user: userId,
|
||||
conversationId: 'test-conversation-id',
|
||||
model,
|
||||
context: 'message',
|
||||
tokenType: 'prompt',
|
||||
inputTokens: -10,
|
||||
writeTokens: -100,
|
||||
readTokens: -5,
|
||||
transactions: { enabled: false },
|
||||
};
|
||||
|
||||
// Act
|
||||
const result = await createStructuredTransaction(txData);
|
||||
|
||||
// Assert: No transaction should be created
|
||||
expect(result).toBeUndefined();
|
||||
const transactions = await Transaction.find({ user: userId });
|
||||
expect(transactions).toHaveLength(0);
|
||||
const balance = await Balance.findOne({ user: userId });
|
||||
expect(balance.tokenCredits).toBe(initialBalance);
|
||||
});
|
||||
|
||||
test('createStructuredTransaction should save transaction but not update balance when balance is disabled but transactions enabled', async () => {
|
||||
// Arrange
|
||||
const userId = new mongoose.Types.ObjectId();
|
||||
const initialBalance = 10000000;
|
||||
await Balance.create({ user: userId, tokenCredits: initialBalance });
|
||||
|
||||
const model = 'claude-3-5-sonnet';
|
||||
const txData = {
|
||||
user: userId,
|
||||
conversationId: 'test-conversation-id',
|
||||
model,
|
||||
context: 'message',
|
||||
tokenType: 'prompt',
|
||||
inputTokens: -10,
|
||||
writeTokens: -100,
|
||||
readTokens: -5,
|
||||
transactions: { enabled: true },
|
||||
balance: { enabled: false },
|
||||
};
|
||||
|
||||
// Act
|
||||
const result = await createStructuredTransaction(txData);
|
||||
|
||||
// Assert: Transaction should be created but balance unchanged
|
||||
expect(result).toBeUndefined();
|
||||
const transactions = await Transaction.find({ user: userId });
|
||||
expect(transactions).toHaveLength(1);
|
||||
expect(transactions[0].inputTokens).toBe(-10);
|
||||
expect(transactions[0].writeTokens).toBe(-100);
|
||||
expect(transactions[0].readTokens).toBe(-5);
|
||||
const balance = await Balance.findOne({ user: userId });
|
||||
expect(balance.tokenCredits).toBe(initialBalance);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,4 +1,4 @@
const { matchModelName } = require('../utils/tokens');
const { matchModelName } = require('@librechat/api');
const defaultRate = 6;

/**
@@ -1,6 +1,6 @@
{
  "name": "@librechat/backend",
  "version": "v0.8.0-rc3",
  "version": "v0.8.0-rc4",
  "description": "",
  "scripts": {
    "start": "echo 'please run this from the root directory'",
@@ -49,14 +49,14 @@
    "@langchain/google-vertexai": "^0.2.13",
    "@langchain/openai": "^0.5.18",
    "@langchain/textsplitters": "^0.1.0",
    "@librechat/agents": "^2.4.76",
    "@librechat/agents": "^2.4.79",
    "@librechat/api": "*",
    "@librechat/data-schemas": "*",
    "@microsoft/microsoft-graph-client": "^3.0.7",
    "@modelcontextprotocol/sdk": "^1.17.1",
    "@node-saml/passport-saml": "^5.1.0",
    "@waylaidwanderer/fetch-event-source": "^3.0.1",
    "axios": "^1.8.2",
    "axios": "^1.12.1",
    "bcryptjs": "^2.4.3",
    "compression": "^1.8.1",
    "connect-redis": "^8.1.0",
@@ -11,8 +11,9 @@ const {
  registerUser,
} = require('~/server/services/AuthService');
const { findUser, getUserById, deleteAllUserSessions, findSession } = require('~/models');
const { getOpenIdConfig } = require('~/strategies');
const { getGraphApiToken } = require('~/server/services/GraphTokenService');
const { getOAuthReconnectionManager } = require('~/config');
const { getOpenIdConfig } = require('~/strategies');

const registrationController = async (req, res) => {
  try {
@@ -75,7 +76,7 @@ const refreshController = async (req, res) => {
      if (!user) {
        return res.status(401).redirect('/login');
      }
      const token = setOpenIDAuthTokens(tokenset, res);
      const token = setOpenIDAuthTokens(tokenset, res, user._id.toString());
      return res.status(200).send({ token, user });
    } catch (error) {
      logger.error('[refreshController] OpenID token refresh error', error);
@@ -96,14 +97,25 @@ const refreshController = async (req, res) => {
      return res.status(200).send({ token, user });
    }

    // Find the session with the hashed refresh token
    const session = await findSession({
      userId: userId,
      refreshToken: refreshToken,
    });
    /** Session with the hashed refresh token */
    const session = await findSession(
      {
        userId: userId,
        refreshToken: refreshToken,
      },
      { lean: false },
    );

    if (session && session.expiration > new Date()) {
      const token = await setAuthTokens(userId, res, session._id);
      const token = await setAuthTokens(userId, res, session);

      // trigger OAuth MCP server reconnection asynchronously (best effort)
      void getOAuthReconnectionManager()
        .reconnectServers(userId)
        .catch((err) => {
          logger.error('Error reconnecting OAuth MCP servers:', err);
        });

      res.status(200).send({ token, user });
    } else if (req?.query?.retry) {
      // Retrying from a refresh token request that failed (401)
@@ -1,5 +1,10 @@
const { logger } = require('@librechat/data-schemas');
const { webSearchKeys, extractWebSearchEnvVars, normalizeHttpError } = require('@librechat/api');
const {
  webSearchKeys,
  extractWebSearchEnvVars,
  normalizeHttpError,
  MCPTokenStorage,
} = require('@librechat/api');
const {
  getFiles,
  updateUser,
@@ -16,11 +21,17 @@ const { verifyEmail, resendVerificationEmail } = require('~/server/services/Auth
const { needsRefresh, getNewS3URL } = require('~/server/services/Files/S3/crud');
const { Tools, Constants, FileSources } = require('librechat-data-provider');
const { processDeleteRequest } = require('~/server/services/Files/process');
const { Transaction, Balance, User } = require('~/db/models');
const { Transaction, Balance, User, Token } = require('~/db/models');
const { getAppConfig } = require('~/server/services/Config');
const { deleteToolCalls } = require('~/models/ToolCall');
const { deleteAllSharedLinks } = require('~/models');
const { getMCPManager } = require('~/config');
const { MCPOAuthHandler } = require('@librechat/api');
const { getFlowStateManager } = require('~/config');
const { CacheKeys } = require('librechat-data-provider');
const { getLogStores } = require('~/cache');
const { clearMCPServerTools } = require('~/server/services/Config/mcpToolsCache');
const { findToken } = require('~/models');

const getUserController = async (req, res) => {
  const appConfig = await getAppConfig({ role: req.user?.role });
@@ -162,6 +173,15 @@ const updateUserPluginsController = async (req, res) => {
        );
        ({ status, message } = normalizeHttpError(authService));
      }
      try {
        // if the MCP server uses OAuth, perform a full cleanup and token revocation
        await maybeUninstallOAuthMCP(user.id, pluginKey, appConfig);
      } catch (error) {
        logger.error(
          `[updateUserPluginsController] Error uninstalling OAuth MCP for ${pluginKey}:`,
          error,
        );
      }
    } else {
      // This handles:
      // 1. Web_search uninstall (keys will be populated with all webSearchKeys if auth was {}).
@@ -269,6 +289,97 @@ const resendVerificationController = async (req, res) => {
  }
};

/**
 * OAuth MCP specific uninstall logic
 */
const maybeUninstallOAuthMCP = async (userId, pluginKey, appConfig) => {
  if (!pluginKey.startsWith(Constants.mcp_prefix)) {
    // this is not an MCP server, so nothing to do here
    return;
  }

  const serverName = pluginKey.replace(Constants.mcp_prefix, '');
  const mcpManager = getMCPManager(userId);
  const serverConfig = mcpManager.getRawConfig(serverName) ?? appConfig?.mcpServers?.[serverName];

  if (!mcpManager.getOAuthServers().has(serverName)) {
    // this server does not use OAuth, so nothing to do here as well
    return;
  }

  // 1. get client info used for revocation (client id, secret)
  const clientTokenData = await MCPTokenStorage.getClientInfoAndMetadata({
    userId,
    serverName,
    findToken,
  });
  if (clientTokenData == null) {
    return;
  }
  const { clientInfo, clientMetadata } = clientTokenData;

  // 2. get decrypted tokens before deletion
  const tokens = await MCPTokenStorage.getTokens({
    userId,
    serverName,
    findToken,
  });

  // 3. revoke OAuth tokens at the provider
  const revocationEndpoint =
    serverConfig.oauth?.revocation_endpoint ?? clientMetadata.revocation_endpoint;
  const revocationEndpointAuthMethodsSupported =
    serverConfig.oauth?.revocation_endpoint_auth_methods_supported ??
    clientMetadata.revocation_endpoint_auth_methods_supported;

  if (tokens?.access_token) {
    try {
      await MCPOAuthHandler.revokeOAuthToken(serverName, tokens.access_token, 'access', {
        serverUrl: serverConfig.url,
        clientId: clientInfo.client_id,
        clientSecret: clientInfo.client_secret ?? '',
        revocationEndpoint,
        revocationEndpointAuthMethodsSupported,
      });
    } catch (error) {
      logger.error(`Error revoking OAuth access token for ${serverName}:`, error);
    }
  }

  if (tokens?.refresh_token) {
    try {
      await MCPOAuthHandler.revokeOAuthToken(serverName, tokens.refresh_token, 'refresh', {
        serverUrl: serverConfig.url,
        clientId: clientInfo.client_id,
        clientSecret: clientInfo.client_secret ?? '',
        revocationEndpoint,
        revocationEndpointAuthMethodsSupported,
      });
    } catch (error) {
      logger.error(`Error revoking OAuth refresh token for ${serverName}:`, error);
    }
  }

  // 4. delete tokens from the DB after revocation attempts
  await MCPTokenStorage.deleteUserTokens({
    userId,
    serverName,
    deleteToken: async (filter) => {
      await Token.deleteOne(filter);
    },
  });

  // 5. clear the flow state for the OAuth tokens
  const flowsCache = getLogStores(CacheKeys.FLOWS);
  const flowManager = getFlowStateManager(flowsCache);
  const flowId = MCPOAuthHandler.generateFlowId(userId, serverName);
  await flowManager.deleteFlow(flowId, 'mcp_get_tokens');
  await flowManager.deleteFlow(flowId, 'mcp_oauth');

  // 6. clear the tools cache for the server
  await clearMCPServerTools({ userId, serverName });
};

module.exports = {
  getUserController,
  getTermsStatusController,
342
api/server/controllers/agents/__tests__/callbacks.spec.js
Normal file
@@ -0,0 +1,342 @@
|
||||
const { Tools } = require('librechat-data-provider');
|
||||
|
||||
// Mock all dependencies before requiring the module
|
||||
jest.mock('nanoid', () => ({
|
||||
nanoid: jest.fn(() => 'mock-id'),
|
||||
}));
|
||||
|
||||
jest.mock('@librechat/api', () => ({
|
||||
sendEvent: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('@librechat/data-schemas', () => ({
|
||||
logger: {
|
||||
error: jest.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
jest.mock('@librechat/agents', () => ({
|
||||
EnvVar: { CODE_API_KEY: 'CODE_API_KEY' },
|
||||
Providers: { GOOGLE: 'google' },
|
||||
GraphEvents: {},
|
||||
getMessageId: jest.fn(),
|
||||
ToolEndHandler: jest.fn(),
|
||||
handleToolCalls: jest.fn(),
|
||||
ChatModelStreamHandler: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('~/server/services/Files/Citations', () => ({
|
||||
processFileCitations: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('~/server/services/Files/Code/process', () => ({
|
||||
processCodeOutput: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('~/server/services/Tools/credentials', () => ({
|
||||
loadAuthValues: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('~/server/services/Files/process', () => ({
|
||||
saveBase64Image: jest.fn(),
|
||||
}));
|
||||
|
||||
describe('createToolEndCallback', () => {
|
||||
let req, res, artifactPromises, createToolEndCallback;
|
||||
let logger;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
|
||||
// Get the mocked logger
|
||||
logger = require('@librechat/data-schemas').logger;
|
||||
|
||||
// Now require the module after all mocks are set up
|
||||
const callbacks = require('../callbacks');
|
||||
createToolEndCallback = callbacks.createToolEndCallback;
|
||||
|
||||
req = {
|
||||
user: { id: 'user123' },
|
||||
};
|
||||
res = {
|
||||
headersSent: false,
|
||||
write: jest.fn(),
|
||||
};
|
||||
artifactPromises = [];
|
||||
});
|
||||
|
||||
describe('ui_resources artifact handling', () => {
|
||||
it('should process ui_resources artifact and return attachment when headers not sent', async () => {
|
||||
const toolEndCallback = createToolEndCallback({ req, res, artifactPromises });
|
||||
|
||||
const output = {
|
||||
tool_call_id: 'tool123',
|
||||
artifact: {
|
||||
[Tools.ui_resources]: {
|
||||
data: {
|
||||
0: { type: 'button', label: 'Click me' },
|
||||
1: { type: 'input', placeholder: 'Enter text' },
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const metadata = {
|
||||
run_id: 'run456',
|
||||
thread_id: 'thread789',
|
||||
};
|
||||
|
||||
await toolEndCallback({ output }, metadata);
|
||||
|
||||
// Wait for all promises to resolve
|
||||
const results = await Promise.all(artifactPromises);
|
||||
|
||||
// When headers are not sent, it returns attachment without writing
|
||||
expect(res.write).not.toHaveBeenCalled();
|
||||
|
||||
const attachment = results[0];
|
||||
expect(attachment).toEqual({
|
||||
type: Tools.ui_resources,
|
||||
messageId: 'run456',
|
||||
toolCallId: 'tool123',
|
||||
conversationId: 'thread789',
|
||||
[Tools.ui_resources]: {
|
||||
0: { type: 'button', label: 'Click me' },
|
||||
1: { type: 'input', placeholder: 'Enter text' },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should write to response when headers are already sent', async () => {
|
||||
res.headersSent = true;
|
||||
const toolEndCallback = createToolEndCallback({ req, res, artifactPromises });
|
||||
|
||||
const output = {
|
||||
tool_call_id: 'tool123',
|
||||
artifact: {
|
||||
[Tools.ui_resources]: {
|
||||
data: {
|
||||
0: { type: 'carousel', items: [] },
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const metadata = {
|
||||
run_id: 'run456',
|
||||
thread_id: 'thread789',
|
||||
};
|
||||
|
||||
await toolEndCallback({ output }, metadata);
|
||||
const results = await Promise.all(artifactPromises);
|
||||
|
||||
expect(res.write).toHaveBeenCalled();
|
||||
expect(results[0]).toEqual({
|
||||
type: Tools.ui_resources,
|
||||
messageId: 'run456',
|
||||
toolCallId: 'tool123',
|
||||
conversationId: 'thread789',
|
||||
[Tools.ui_resources]: {
|
||||
0: { type: 'carousel', items: [] },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle errors when processing ui_resources', async () => {
|
||||
const toolEndCallback = createToolEndCallback({ req, res, artifactPromises });
|
||||
|
||||
// Mock res.write to throw an error
|
||||
res.headersSent = true;
|
||||
res.write.mockImplementation(() => {
|
||||
throw new Error('Write failed');
|
||||
});
|
||||
|
||||
const output = {
|
||||
tool_call_id: 'tool123',
|
||||
artifact: {
|
||||
[Tools.ui_resources]: {
|
||||
data: {
|
||||
0: { type: 'test' },
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const metadata = {
|
||||
run_id: 'run456',
|
||||
thread_id: 'thread789',
|
||||
};
|
||||
|
||||
await toolEndCallback({ output }, metadata);
|
||||
const results = await Promise.all(artifactPromises);
|
||||
|
||||
expect(logger.error).toHaveBeenCalledWith(
|
||||
'Error processing artifact content:',
|
||||
expect.any(Error),
|
||||
);
|
||||
expect(results[0]).toBeNull();
|
||||
});
|
||||
|
||||
it('should handle multiple artifacts including ui_resources', async () => {
|
||||
const toolEndCallback = createToolEndCallback({ req, res, artifactPromises });
|
||||
|
||||
const output = {
|
||||
tool_call_id: 'tool123',
|
||||
artifact: {
|
||||
[Tools.ui_resources]: {
|
||||
data: {
|
||||
0: { type: 'chart', data: [] },
|
||||
},
|
||||
},
|
||||
[Tools.web_search]: {
|
||||
results: ['result1', 'result2'],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const metadata = {
|
||||
run_id: 'run456',
|
||||
thread_id: 'thread789',
|
||||
};
|
||||
|
||||
await toolEndCallback({ output }, metadata);
|
||||
const results = await Promise.all(artifactPromises);
|
||||
|
||||
// Both ui_resources and web_search should be processed
|
||||
expect(artifactPromises).toHaveLength(2);
|
||||
expect(results).toHaveLength(2);
|
||||
|
||||
// Check ui_resources attachment
|
||||
const uiResourceAttachment = results.find((r) => r?.type === Tools.ui_resources);
|
||||
expect(uiResourceAttachment).toBeTruthy();
|
||||
expect(uiResourceAttachment[Tools.ui_resources]).toEqual({
|
||||
0: { type: 'chart', data: [] },
|
||||
});
|
||||
|
||||
// Check web_search attachment
|
||||
const webSearchAttachment = results.find((r) => r?.type === Tools.web_search);
|
||||
expect(webSearchAttachment).toBeTruthy();
|
||||
expect(webSearchAttachment[Tools.web_search]).toEqual({
|
||||
results: ['result1', 'result2'],
|
||||
});
|
||||
});
|
||||
|
||||
it('should not process artifacts when output has no artifacts', async () => {
|
||||
const toolEndCallback = createToolEndCallback({ req, res, artifactPromises });
|
||||
|
||||
const output = {
|
||||
tool_call_id: 'tool123',
|
||||
content: 'Some regular content',
|
||||
// No artifact property
|
||||
};
|
||||
|
||||
const metadata = {
|
||||
run_id: 'run456',
|
||||
thread_id: 'thread789',
|
||||
};
|
||||
|
||||
await toolEndCallback({ output }, metadata);
|
||||
|
||||
expect(artifactPromises).toHaveLength(0);
|
||||
expect(res.write).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should handle empty ui_resources data object', async () => {
|
||||
const toolEndCallback = createToolEndCallback({ req, res, artifactPromises });
|
||||
|
||||
const output = {
|
||||
tool_call_id: 'tool123',
|
||||
artifact: {
|
||||
[Tools.ui_resources]: {
|
||||
data: {},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const metadata = {
|
||||
run_id: 'run456',
|
||||
thread_id: 'thread789',
|
||||
};
|
||||
|
||||
await toolEndCallback({ output }, metadata);
|
||||
const results = await Promise.all(artifactPromises);
|
||||
|
||||
expect(results[0]).toEqual({
|
||||
type: Tools.ui_resources,
|
||||
messageId: 'run456',
|
||||
toolCallId: 'tool123',
|
||||
conversationId: 'thread789',
|
||||
[Tools.ui_resources]: {},
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle ui_resources with complex nested data', async () => {
|
||||
const toolEndCallback = createToolEndCallback({ req, res, artifactPromises });
|
||||
|
||||
const complexData = {
|
||||
0: {
|
||||
type: 'form',
|
||||
fields: [
|
||||
{ name: 'field1', type: 'text', required: true },
|
||||
{ name: 'field2', type: 'select', options: ['a', 'b', 'c'] },
|
||||
],
|
||||
nested: {
|
||||
deep: {
|
||||
value: 123,
|
||||
array: [1, 2, 3],
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const output = {
|
||||
tool_call_id: 'tool123',
|
||||
artifact: {
|
||||
[Tools.ui_resources]: {
|
||||
data: complexData,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const metadata = {
|
||||
run_id: 'run456',
|
||||
thread_id: 'thread789',
|
||||
};
|
||||
|
||||
await toolEndCallback({ output }, metadata);
|
||||
const results = await Promise.all(artifactPromises);
|
||||
|
||||
expect(results[0][Tools.ui_resources]).toEqual(complexData);
|
||||
});
|
||||
|
||||
it('should handle when output is undefined', async () => {
|
||||
const toolEndCallback = createToolEndCallback({ req, res, artifactPromises });
|
||||
|
||||
const metadata = {
|
||||
run_id: 'run456',
|
||||
thread_id: 'thread789',
|
||||
};
|
||||
|
||||
await toolEndCallback({ output: undefined }, metadata);
|
||||
|
||||
expect(artifactPromises).toHaveLength(0);
|
||||
expect(res.write).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle when data parameter is undefined', async () => {
|
||||
const toolEndCallback = createToolEndCallback({ req, res, artifactPromises });
|
||||
|
||||
const metadata = {
|
||||
run_id: 'run456',
|
||||
thread_id: 'thread789',
|
||||
};
|
||||
|
||||
await toolEndCallback(undefined, metadata);
|
||||
|
||||
expect(artifactPromises).toHaveLength(0);
|
||||
expect(res.write).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -265,6 +265,30 @@ function createToolEndCallback({ req, res, artifactPromises }) {
    );
  }

  // TODO: a lot of duplicated code in createToolEndCallback
  // we should refactor this to use a helper function in a follow-up PR
  if (output.artifact[Tools.ui_resources]) {
    artifactPromises.push(
      (async () => {
        const attachment = {
          type: Tools.ui_resources,
          messageId: metadata.run_id,
          toolCallId: output.tool_call_id,
          conversationId: metadata.thread_id,
          [Tools.ui_resources]: output.artifact[Tools.ui_resources].data,
        };
        if (!res.headersSent) {
          return attachment;
        }
        res.write(`event: attachment\ndata: ${JSON.stringify(attachment)}\n\n`);
        return attachment;
      })().catch((error) => {
        logger.error('Error processing artifact content:', error);
        return null;
      }),
    );
  }

  if (output.artifact[Tools.web_search]) {
    artifactPromises.push(
      (async () => {
@@ -7,10 +7,12 @@ const {
  createRun,
  Tokenizer,
  checkAccess,
  logAxiosError,
  resolveHeaders,
  getBalanceConfig,
  memoryInstructions,
  formatContentStrings,
  getTransactionsConfig,
  createMemoryProcessor,
} = require('@librechat/api');
const {
@@ -87,11 +89,10 @@ function createTokenCounter(encoding) {
}

function logToolError(graph, error, toolId) {
  logger.error(
    '[api/server/controllers/agents/client.js #chatCompletion] Tool Error',
  logAxiosError({
    error,
    toolId,
  );
    message: `[api/server/controllers/agents/client.js #chatCompletion] Tool Error "${toolId}"`,
  });
}

class AgentClient extends BaseClient {
@@ -623,11 +624,13 @@ class AgentClient extends BaseClient {
   * @param {string} [params.model]
   * @param {string} [params.context='message']
   * @param {AppConfig['balance']} [params.balance]
   * @param {AppConfig['transactions']} [params.transactions]
   * @param {UsageMetadata[]} [params.collectedUsage=this.collectedUsage]
   */
  async recordCollectedUsage({
    model,
    balance,
    transactions,
    context = 'message',
    collectedUsage = this.collectedUsage,
  }) {
@@ -653,6 +656,7 @@ class AgentClient extends BaseClient {
    const txMetadata = {
      context,
      balance,
      transactions,
      conversationId: this.conversationId,
      user: this.user ?? this.options.req.user?.id,
      endpointTokenConfig: this.options.endpointTokenConfig,
@@ -868,11 +872,10 @@ class AgentClient extends BaseClient {
      if (agent.useLegacyContent === true) {
        messages = formatContentStrings(messages);
      }
      if (
        agent.model_parameters?.clientOptions?.defaultHeaders?.['anthropic-beta']?.includes(
          'prompt-caching',
        )
      ) {
      const defaultHeaders =
        agent.model_parameters?.clientOptions?.defaultHeaders ??
        agent.model_parameters?.configuration?.defaultHeaders;
      if (defaultHeaders?.['anthropic-beta']?.includes('prompt-caching')) {
        messages = addCacheControl(messages);
      }

@@ -1051,7 +1054,12 @@ class AgentClient extends BaseClient {
      }

      const balanceConfig = getBalanceConfig(appConfig);
      await this.recordCollectedUsage({ context: 'message', balance: balanceConfig });
      const transactionsConfig = getTransactionsConfig(appConfig);
      await this.recordCollectedUsage({
        context: 'message',
        balance: balanceConfig,
        transactions: transactionsConfig,
      });
    } catch (err) {
      logger.error(
        '[api/server/controllers/agents/client.js #chatCompletion] Error recording collected usage',
@@ -1245,11 +1253,13 @@ class AgentClient extends BaseClient {
      });

      const balanceConfig = getBalanceConfig(appConfig);
      const transactionsConfig = getTransactionsConfig(appConfig);
      await this.recordCollectedUsage({
        collectedUsage,
        context: 'title',
        model: clientOptions.model,
        balance: balanceConfig,
        transactions: transactionsConfig,
      }).catch((err) => {
        logger.error(
          '[api/server/controllers/agents/client.js #titleConvo] Error recording collected usage',

@@ -237,6 +237,9 @@ describe('AgentClient - titleConvo', () => {
      balance: {
        enabled: false,
      },
      transactions: {
        enabled: true,
      },
    });
  });
@@ -1,7 +1,7 @@
const { v4 } = require('uuid');
const { sleep } = require('@librechat/agents');
const { logger } = require('@librechat/data-schemas');
const { sendEvent, getBalanceConfig } = require('@librechat/api');
const { sendEvent, getBalanceConfig, getModelMaxTokens } = require('@librechat/api');
const {
  Time,
  Constants,
@@ -34,7 +34,6 @@ const { checkBalance } = require('~/models/balanceMethods');
const { getConvo } = require('~/models/Conversation');
const getLogStores = require('~/cache/getLogStores');
const { countTokens } = require('~/server/utils');
const { getModelMaxTokens } = require('~/utils');
const { getOpenAIClient } = require('./helpers');

/**

@@ -1,7 +1,7 @@
const { v4 } = require('uuid');
const { sleep } = require('@librechat/agents');
const { logger } = require('@librechat/data-schemas');
const { sendEvent, getBalanceConfig } = require('@librechat/api');
const { sendEvent, getBalanceConfig, getModelMaxTokens } = require('@librechat/api');
const {
  Time,
  Constants,
@@ -31,7 +31,6 @@ const { checkBalance } = require('~/models/balanceMethods');
const { getConvo } = require('~/models/Conversation');
const getLogStores = require('~/cache/getLogStores');
const { countTokens } = require('~/server/utils');
const { getModelMaxTokens } = require('~/utils');
const { getOpenAIClient } = require('./helpers');

/**
@@ -1,6 +1,6 @@
const { logger } = require('@librechat/data-schemas');
const { generate2FATempToken } = require('~/server/services/twoFactorService');
const { setAuthTokens } = require('~/server/services/AuthService');
const { logger } = require('~/config');

const loginController = async (req, res) => {
  try {
50
api/server/controllers/auth/oauth.js
Normal file
@@ -0,0 +1,50 @@
const { isEnabled } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { syncUserEntraGroupMemberships } = require('~/server/services/PermissionService');
const { setAuthTokens, setOpenIDAuthTokens } = require('~/server/services/AuthService');
const { checkBan } = require('~/server/middleware');

const domains = {
  client: process.env.DOMAIN_CLIENT,
  server: process.env.DOMAIN_SERVER,
};

function createOAuthHandler(redirectUri = domains.client) {
  /**
   * A handler to process OAuth authentication results.
   * @type {Function}
   * @param {ServerRequest} req - Express request object.
   * @param {ServerResponse} res - Express response object.
   * @param {NextFunction} next - Express next middleware function.
   */
  return async (req, res, next) => {
    try {
      if (res.headersSent) {
        return;
      }

      await checkBan(req, res);
      if (req.banned) {
        return;
      }
      if (
        req.user &&
        req.user.provider == 'openid' &&
        isEnabled(process.env.OPENID_REUSE_TOKENS) === true
      ) {
        await syncUserEntraGroupMemberships(req.user, req.user.tokenset.access_token);
        setOpenIDAuthTokens(req.user.tokenset, res, req.user._id.toString());
      } else {
        await setAuthTokens(req.user._id, res);
      }
      res.redirect(redirectUri);
    } catch (err) {
      logger.error('Error in setting authentication tokens:', err);
      next(err);
    }
  };
}

module.exports = {
  createOAuthHandler,
};
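Note: a rough sketch of how this factory would be wired into a social-login callback route; the route path and strategy name are illustrative, not taken from this diff.

```js
const passport = require('passport');
const { createOAuthHandler } = require('~/server/controllers/auth/oauth');

router.get(
  '/google/callback',
  passport.authenticate('google', { session: false, failureRedirect: '/login' }),
  createOAuthHandler(), // defaults to redirecting to DOMAIN_CLIENT on success
);
```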
@@ -12,7 +12,8 @@ const { logger } = require('@librechat/data-schemas');
const mongoSanitize = require('express-mongo-sanitize');
const { isEnabled, ErrorController } = require('@librechat/api');
const { connectDb, indexSync } = require('~/db');
const validateImageRequest = require('./middleware/validateImageRequest');
const initializeOAuthReconnectManager = require('./services/initializeOAuthReconnectManager');
const createValidateImageRequest = require('./middleware/validateImageRequest');
const { jwtLogin, ldapLogin, passportLogin } = require('~/strategies');
const { updateInterfacePermissions } = require('~/models/interface');
const { checkMigrations } = require('./services/start/migration');
@@ -108,6 +109,7 @@ const startServer = async () => {
  app.use('/oauth', routes.oauth);
  /* API Endpoints */
  app.use('/api/auth', routes.auth);
  app.use('/api/admin', routes.adminAuth);
  app.use('/api/actions', routes.actions);
  app.use('/api/keys', routes.keys);
  app.use('/api/user', routes.user);
@@ -126,7 +128,7 @@ const startServer = async () => {
  app.use('/api/config', routes.config);
  app.use('/api/assistants', routes.assistants);
  app.use('/api/files', await routes.files.initialize());
  app.use('/images/', validateImageRequest, routes.staticRoute);
  app.use('/images/', createValidateImageRequest(appConfig.secureImageLinks), routes.staticRoute);
  app.use('/api/share', routes.share);
  app.use('/api/roles', routes.roles);
  app.use('/api/agents', routes.agents);
@@ -154,7 +156,7 @@ const startServer = async () => {
    res.send(updatedIndexHtml);
  });

  app.listen(port, host, () => {
  app.listen(port, host, async () => {
    if (host === '0.0.0.0') {
      logger.info(
        `Server listening on all interfaces at port ${port}. Use http://localhost:${port} to access it`,
@@ -163,7 +165,9 @@ const startServer = async () => {
      logger.info(`Server listening at http://${host == '0.0.0.0' ? 'localhost' : host}:${port}`);
    }

    initializeMCPs().then(() => checkMigrations());
    await initializeMCPs();
    await initializeOAuthReconnectManager();
    await checkMigrations();
  });
};
@@ -11,18 +11,25 @@ const { getAppConfig } = require('~/server/services/Config');
 * @param {Object} res - Express response object.
 * @param {Function} next - Next middleware function.
 *
 * @returns {Promise<function|Object>} - Returns a Promise which when resolved calls next middleware if the domain's email is allowed
 * @returns {Promise<void>} - Calls next middleware if the domain's email is allowed, otherwise redirects to login
 */
const checkDomainAllowed = async (req, res, next = () => {}) => {
  const email = req?.user?.email;
  const appConfig = await getAppConfig({
    role: req?.user?.role,
  });
  if (email && !isEmailDomainAllowed(email, appConfig?.registration?.allowedDomains)) {
    logger.error(`[Social Login] [Social Login not allowed] [Email: ${email}]`);
    return res.redirect('/login');
  } else {
    return next();
const checkDomainAllowed = async (req, res, next) => {
  try {
    const email = req?.user?.email;
    const appConfig = await getAppConfig({
      role: req?.user?.role,
    });

    if (email && !isEmailDomainAllowed(email, appConfig?.registration?.allowedDomains)) {
      logger.error(`[Social Login] [Social Login not allowed] [Email: ${email}]`);
      res.redirect('/login');
      return;
    }

    next();
  } catch (error) {
    logger.error('[checkDomainAllowed] Error checking domain:', error);
    res.redirect('/login');
  }
};
@@ -1,6 +1,5 @@
const validatePasswordReset = require('./validatePasswordReset');
const validateRegistration = require('./validateRegistration');
const validateImageRequest = require('./validateImageRequest');
const buildEndpointOption = require('./buildEndpointOption');
const validateMessageReq = require('./validateMessageReq');
const checkDomainAllowed = require('./checkDomainAllowed');
@@ -15,6 +14,7 @@ const checkInviteUser = require('./checkInviteUser');
const requireJwtAuth = require('./requireJwtAuth');
const configMiddleware = require('./config/app');
const validateModel = require('./validateModel');
const requireAdmin = require('./requireAdmin');
const moderateText = require('./moderateText');
const logHeaders = require('./logHeaders');
const setHeaders = require('./setHeaders');
@@ -37,6 +37,7 @@ module.exports = {
  setHeaders,
  logHeaders,
  moderateText,
  requireAdmin,
  validateModel,
  requireJwtAuth,
  checkInviteUser,
@@ -50,6 +51,5 @@ module.exports = {
  validateMessageReq,
  buildEndpointOption,
  validateRegistration,
  validateImageRequest,
  validatePasswordReset,
};
22
api/server/middleware/requireAdmin.js
Normal file
@@ -0,0 +1,22 @@
const { logger } = require('@librechat/data-schemas');
const { SystemRoles } = require('librechat-data-provider');

/**
 * Middleware to check if authenticated user has admin role
 * Should be used AFTER authentication middleware (requireJwtAuth, requireLocalAuth, etc.)
 */
const requireAdmin = (req, res, next) => {
  if (!req.user) {
    logger.warn('[requireAdmin] No user found in request');
    return res.status(401).json({ message: 'Authentication required' });
  }

  if (!req.user.role || req.user.role !== SystemRoles.ADMIN) {
    logger.debug('[requireAdmin] Access denied for non-admin user:', req.user.email);
    return res.status(403).json({ message: 'Access denied: Admin privileges required' });
  }

  next();
};

module.exports = requireAdmin;

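requireAdmin assumes an authentication middleware has already populated req.user, as its comment states. A minimal usage sketch mirroring the /verify route that the admin auth router later in this diff adds (the sketch only restates that chain):

const express = require('express');
const { requireJwtAuth, requireAdmin } = require('~/server/middleware');

const router = express.Router();

// 401 when req.user is missing, 403 when the role is not SystemRoles.ADMIN, otherwise next()
router.get('/verify', requireJwtAuth, requireAdmin, (req, res) => {
  res.status(200).json({ user: req.user });
});

module.exports = router;
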
@@ -1,6 +1,6 @@
const passport = require('passport');
const cookies = require('cookie');
const { isEnabled } = require('~/server/utils');
const passport = require('passport');
const { isEnabled } = require('@librechat/api');

/**
 * Custom Middleware to handle JWT authentication, with support for OpenID token reuse

@@ -1,14 +1,14 @@
|
||||
const jwt = require('jsonwebtoken');
|
||||
const validateImageRequest = require('~/server/middleware/validateImageRequest');
|
||||
const { isEnabled } = require('@librechat/api');
|
||||
const createValidateImageRequest = require('~/server/middleware/validateImageRequest');
|
||||
|
||||
jest.mock('~/server/services/Config/app', () => ({
|
||||
getAppConfig: jest.fn(),
|
||||
jest.mock('@librechat/api', () => ({
|
||||
isEnabled: jest.fn(),
|
||||
}));
|
||||
|
||||
describe('validateImageRequest middleware', () => {
|
||||
let req, res, next;
|
||||
let req, res, next, validateImageRequest;
|
||||
const validObjectId = '65cfb246f7ecadb8b1e8036b';
|
||||
const { getAppConfig } = require('~/server/services/Config/app');
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
@@ -22,116 +22,278 @@ describe('validateImageRequest middleware', () => {
|
||||
};
|
||||
next = jest.fn();
|
||||
process.env.JWT_REFRESH_SECRET = 'test-secret';
|
||||
process.env.OPENID_REUSE_TOKENS = 'false';
|
||||
|
||||
// Mock getAppConfig to return secureImageLinks: true by default
|
||||
getAppConfig.mockResolvedValue({
|
||||
secureImageLinks: true,
|
||||
});
|
||||
// Default: OpenID token reuse disabled
|
||||
isEnabled.mockReturnValue(false);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
test('should call next() if secureImageLinks is false', async () => {
|
||||
getAppConfig.mockResolvedValue({
|
||||
secureImageLinks: false,
|
||||
describe('Factory function', () => {
|
||||
test('should return a pass-through middleware if secureImageLinks is false', async () => {
|
||||
const middleware = createValidateImageRequest(false);
|
||||
await middleware(req, res, next);
|
||||
expect(next).toHaveBeenCalled();
|
||||
expect(res.status).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('should return validation middleware if secureImageLinks is true', async () => {
|
||||
validateImageRequest = createValidateImageRequest(true);
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(res.status).toHaveBeenCalledWith(401);
|
||||
expect(res.send).toHaveBeenCalledWith('Unauthorized');
|
||||
});
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(next).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('should return 401 if refresh token is not provided', async () => {
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(res.status).toHaveBeenCalledWith(401);
|
||||
expect(res.send).toHaveBeenCalledWith('Unauthorized');
|
||||
});
|
||||
describe('Standard LibreChat token flow', () => {
|
||||
beforeEach(() => {
|
||||
validateImageRequest = createValidateImageRequest(true);
|
||||
});
|
||||
|
||||
test('should return 403 if refresh token is invalid', async () => {
|
||||
req.headers.cookie = 'refreshToken=invalid-token';
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(res.status).toHaveBeenCalledWith(403);
|
||||
expect(res.send).toHaveBeenCalledWith('Access Denied');
|
||||
});
|
||||
test('should return 401 if refresh token is not provided', async () => {
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(res.status).toHaveBeenCalledWith(401);
|
||||
expect(res.send).toHaveBeenCalledWith('Unauthorized');
|
||||
});
|
||||
|
||||
test('should return 403 if refresh token is expired', async () => {
|
||||
const expiredToken = jwt.sign(
|
||||
{ id: validObjectId, exp: Math.floor(Date.now() / 1000) - 3600 },
|
||||
process.env.JWT_REFRESH_SECRET,
|
||||
);
|
||||
req.headers.cookie = `refreshToken=${expiredToken}`;
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(res.status).toHaveBeenCalledWith(403);
|
||||
expect(res.send).toHaveBeenCalledWith('Access Denied');
|
||||
});
|
||||
|
||||
test('should call next() for valid image path', async () => {
|
||||
const validToken = jwt.sign(
|
||||
{ id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
|
||||
process.env.JWT_REFRESH_SECRET,
|
||||
);
|
||||
req.headers.cookie = `refreshToken=${validToken}`;
|
||||
req.originalUrl = `/images/${validObjectId}/example.jpg`;
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(next).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('should return 403 for invalid image path', async () => {
|
||||
const validToken = jwt.sign(
|
||||
{ id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
|
||||
process.env.JWT_REFRESH_SECRET,
|
||||
);
|
||||
req.headers.cookie = `refreshToken=${validToken}`;
|
||||
req.originalUrl = '/images/65cfb246f7ecadb8b1e8036c/example.jpg'; // Different ObjectId
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(res.status).toHaveBeenCalledWith(403);
|
||||
expect(res.send).toHaveBeenCalledWith('Access Denied');
|
||||
});
|
||||
|
||||
test('should return 403 for invalid ObjectId format', async () => {
|
||||
const validToken = jwt.sign(
|
||||
{ id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
|
||||
process.env.JWT_REFRESH_SECRET,
|
||||
);
|
||||
req.headers.cookie = `refreshToken=${validToken}`;
|
||||
req.originalUrl = '/images/123/example.jpg'; // Invalid ObjectId
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(res.status).toHaveBeenCalledWith(403);
|
||||
expect(res.send).toHaveBeenCalledWith('Access Denied');
|
||||
});
|
||||
|
||||
// File traversal tests
|
||||
test('should prevent file traversal attempts', async () => {
|
||||
const validToken = jwt.sign(
|
||||
{ id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
|
||||
process.env.JWT_REFRESH_SECRET,
|
||||
);
|
||||
req.headers.cookie = `refreshToken=${validToken}`;
|
||||
|
||||
const traversalAttempts = [
|
||||
`/images/${validObjectId}/../../../etc/passwd`,
|
||||
`/images/${validObjectId}/..%2F..%2F..%2Fetc%2Fpasswd`,
|
||||
`/images/${validObjectId}/image.jpg/../../../etc/passwd`,
|
||||
`/images/${validObjectId}/%2e%2e%2f%2e%2e%2f%2e%2e%2fetc%2fpasswd`,
|
||||
];
|
||||
|
||||
for (const attempt of traversalAttempts) {
|
||||
req.originalUrl = attempt;
|
||||
test('should return 403 if refresh token is invalid', async () => {
|
||||
req.headers.cookie = 'refreshToken=invalid-token';
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(res.status).toHaveBeenCalledWith(403);
|
||||
expect(res.send).toHaveBeenCalledWith('Access Denied');
|
||||
jest.clearAllMocks();
|
||||
}
|
||||
});
|
||||
|
||||
test('should return 403 if refresh token is expired', async () => {
|
||||
const expiredToken = jwt.sign(
|
||||
{ id: validObjectId, exp: Math.floor(Date.now() / 1000) - 3600 },
|
||||
process.env.JWT_REFRESH_SECRET,
|
||||
);
|
||||
req.headers.cookie = `refreshToken=${expiredToken}`;
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(res.status).toHaveBeenCalledWith(403);
|
||||
expect(res.send).toHaveBeenCalledWith('Access Denied');
|
||||
});
|
||||
|
||||
test('should call next() for valid image path', async () => {
|
||||
const validToken = jwt.sign(
|
||||
{ id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
|
||||
process.env.JWT_REFRESH_SECRET,
|
||||
);
|
||||
req.headers.cookie = `refreshToken=${validToken}`;
|
||||
req.originalUrl = `/images/${validObjectId}/example.jpg`;
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(next).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('should return 403 for invalid image path', async () => {
|
||||
const validToken = jwt.sign(
|
||||
{ id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
|
||||
process.env.JWT_REFRESH_SECRET,
|
||||
);
|
||||
req.headers.cookie = `refreshToken=${validToken}`;
|
||||
req.originalUrl = '/images/65cfb246f7ecadb8b1e8036c/example.jpg'; // Different ObjectId
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(res.status).toHaveBeenCalledWith(403);
|
||||
expect(res.send).toHaveBeenCalledWith('Access Denied');
|
||||
});
|
||||
|
||||
test('should allow agent avatar pattern for any valid ObjectId', async () => {
|
||||
const validToken = jwt.sign(
|
||||
{ id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
|
||||
process.env.JWT_REFRESH_SECRET,
|
||||
);
|
||||
req.headers.cookie = `refreshToken=${validToken}`;
|
||||
req.originalUrl = '/images/65cfb246f7ecadb8b1e8036c/agent-avatar-12345.png';
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(next).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('should prevent file traversal attempts', async () => {
|
||||
const validToken = jwt.sign(
|
||||
{ id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
|
||||
process.env.JWT_REFRESH_SECRET,
|
||||
);
|
||||
req.headers.cookie = `refreshToken=${validToken}`;
|
||||
|
||||
const traversalAttempts = [
|
||||
`/images/${validObjectId}/../../../etc/passwd`,
|
||||
`/images/${validObjectId}/..%2F..%2F..%2Fetc%2Fpasswd`,
|
||||
`/images/${validObjectId}/image.jpg/../../../etc/passwd`,
|
||||
`/images/${validObjectId}/%2e%2e%2f%2e%2e%2f%2e%2e%2fetc%2fpasswd`,
|
||||
];
|
||||
|
||||
for (const attempt of traversalAttempts) {
|
||||
req.originalUrl = attempt;
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(res.status).toHaveBeenCalledWith(403);
|
||||
expect(res.send).toHaveBeenCalledWith('Access Denied');
|
||||
jest.clearAllMocks();
|
||||
// Reset mocks for next iteration
|
||||
res.status = jest.fn().mockReturnThis();
|
||||
res.send = jest.fn();
|
||||
}
|
||||
});
|
||||
|
||||
test('should handle URL encoded characters in valid paths', async () => {
|
||||
const validToken = jwt.sign(
|
||||
{ id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
|
||||
process.env.JWT_REFRESH_SECRET,
|
||||
);
|
||||
req.headers.cookie = `refreshToken=${validToken}`;
|
||||
req.originalUrl = `/images/${validObjectId}/image%20with%20spaces.jpg`;
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(next).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
test('should handle URL encoded characters in valid paths', async () => {
|
||||
const validToken = jwt.sign(
|
||||
{ id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
|
||||
process.env.JWT_REFRESH_SECRET,
|
||||
);
|
||||
req.headers.cookie = `refreshToken=${validToken}`;
|
||||
req.originalUrl = `/images/${validObjectId}/image%20with%20spaces.jpg`;
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(next).toHaveBeenCalled();
|
||||
describe('OpenID token flow', () => {
|
||||
beforeEach(() => {
|
||||
validateImageRequest = createValidateImageRequest(true);
|
||||
// Enable OpenID token reuse
|
||||
isEnabled.mockReturnValue(true);
|
||||
process.env.OPENID_REUSE_TOKENS = 'true';
|
||||
});
|
||||
|
||||
test('should return 403 if no OpenID user ID cookie when token_provider is openid', async () => {
|
||||
req.headers.cookie = 'refreshToken=dummy-token; token_provider=openid';
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(res.status).toHaveBeenCalledWith(403);
|
||||
expect(res.send).toHaveBeenCalledWith('Access Denied');
|
||||
});
|
||||
|
||||
test('should validate JWT-signed user ID for OpenID flow', async () => {
|
||||
const signedUserId = jwt.sign(
|
||||
{ id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
|
||||
process.env.JWT_REFRESH_SECRET,
|
||||
);
|
||||
req.headers.cookie = `refreshToken=dummy-token; token_provider=openid; openid_user_id=${signedUserId}`;
|
||||
req.originalUrl = `/images/${validObjectId}/example.jpg`;
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(next).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('should return 403 for invalid JWT-signed user ID', async () => {
|
||||
req.headers.cookie =
|
||||
'refreshToken=dummy-token; token_provider=openid; openid_user_id=invalid-jwt';
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(res.status).toHaveBeenCalledWith(403);
|
||||
expect(res.send).toHaveBeenCalledWith('Access Denied');
|
||||
});
|
||||
|
||||
test('should return 403 for expired JWT-signed user ID', async () => {
|
||||
const expiredSignedUserId = jwt.sign(
|
||||
{ id: validObjectId, exp: Math.floor(Date.now() / 1000) - 3600 },
|
||||
process.env.JWT_REFRESH_SECRET,
|
||||
);
|
||||
req.headers.cookie = `refreshToken=dummy-token; token_provider=openid; openid_user_id=${expiredSignedUserId}`;
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(res.status).toHaveBeenCalledWith(403);
|
||||
expect(res.send).toHaveBeenCalledWith('Access Denied');
|
||||
});
|
||||
|
||||
test('should validate image path against JWT-signed user ID', async () => {
|
||||
const signedUserId = jwt.sign(
|
||||
{ id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
|
||||
process.env.JWT_REFRESH_SECRET,
|
||||
);
|
||||
const differentObjectId = '65cfb246f7ecadb8b1e8036c';
|
||||
req.headers.cookie = `refreshToken=dummy-token; token_provider=openid; openid_user_id=${signedUserId}`;
|
||||
req.originalUrl = `/images/${differentObjectId}/example.jpg`;
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(res.status).toHaveBeenCalledWith(403);
|
||||
expect(res.send).toHaveBeenCalledWith('Access Denied');
|
||||
});
|
||||
|
||||
test('should allow agent avatars in OpenID flow', async () => {
|
||||
const signedUserId = jwt.sign(
|
||||
{ id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
|
||||
process.env.JWT_REFRESH_SECRET,
|
||||
);
|
||||
req.headers.cookie = `refreshToken=dummy-token; token_provider=openid; openid_user_id=${signedUserId}`;
|
||||
req.originalUrl = '/images/65cfb246f7ecadb8b1e8036c/agent-avatar-12345.png';
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(next).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Security edge cases', () => {
|
||||
let validToken;
|
||||
|
||||
beforeEach(() => {
|
||||
validateImageRequest = createValidateImageRequest(true);
|
||||
validToken = jwt.sign(
|
||||
{ id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
|
||||
process.env.JWT_REFRESH_SECRET,
|
||||
);
|
||||
});
|
||||
|
||||
test('should handle very long image filenames', async () => {
|
||||
const longFilename = 'a'.repeat(1000) + '.jpg';
|
||||
req.headers.cookie = `refreshToken=${validToken}`;
|
||||
req.originalUrl = `/images/${validObjectId}/${longFilename}`;
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(next).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('should handle URLs with maximum practical length', async () => {
|
||||
// Most browsers support URLs up to ~2000 characters
|
||||
const longFilename = 'x'.repeat(1900) + '.jpg';
|
||||
req.headers.cookie = `refreshToken=${validToken}`;
|
||||
req.originalUrl = `/images/${validObjectId}/${longFilename}`;
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(next).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('should accept URLs just under the 2048 limit', async () => {
|
||||
// Create a URL exactly 2047 characters long
|
||||
const baseLength = `/images/${validObjectId}/`.length + '.jpg'.length;
|
||||
const filenameLength = 2047 - baseLength;
|
||||
const filename = 'a'.repeat(filenameLength) + '.jpg';
|
||||
req.headers.cookie = `refreshToken=${validToken}`;
|
||||
req.originalUrl = `/images/${validObjectId}/${filename}`;
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(next).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('should handle malformed URL encoding gracefully', async () => {
|
||||
req.headers.cookie = `refreshToken=${validToken}`;
|
||||
req.originalUrl = `/images/${validObjectId}/test%ZZinvalid.jpg`;
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(res.status).toHaveBeenCalledWith(403);
|
||||
expect(res.send).toHaveBeenCalledWith('Access Denied');
|
||||
});
|
||||
|
||||
test('should reject URLs with null bytes', async () => {
|
||||
req.headers.cookie = `refreshToken=${validToken}`;
|
||||
req.originalUrl = `/images/${validObjectId}/test\x00.jpg`;
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(res.status).toHaveBeenCalledWith(403);
|
||||
expect(res.send).toHaveBeenCalledWith('Access Denied');
|
||||
});
|
||||
|
||||
test('should handle URLs with repeated slashes', async () => {
|
||||
req.headers.cookie = `refreshToken=${validToken}`;
|
||||
req.originalUrl = `/images/${validObjectId}//test.jpg`;
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(res.status).toHaveBeenCalledWith(403);
|
||||
expect(res.send).toHaveBeenCalledWith('Access Denied');
|
||||
});
|
||||
|
||||
test('should reject extremely long URLs as potential DoS', async () => {
|
||||
// Create a URL longer than 2048 characters
|
||||
const baseLength = `/images/${validObjectId}/`.length + '.jpg'.length;
|
||||
const filenameLength = 2049 - baseLength; // Ensure total length exceeds 2048
|
||||
const extremelyLongFilename = 'x'.repeat(filenameLength) + '.jpg';
|
||||
req.headers.cookie = `refreshToken=${validToken}`;
|
||||
req.originalUrl = `/images/${validObjectId}/${extremelyLongFilename}`;
|
||||
// Verify our test URL is actually too long
|
||||
expect(req.originalUrl.length).toBeGreaterThan(2048);
|
||||
await validateImageRequest(req, res, next);
|
||||
expect(res.status).toHaveBeenCalledWith(403);
|
||||
expect(res.send).toHaveBeenCalledWith('Access Denied');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
const cookies = require('cookie');
|
||||
const jwt = require('jsonwebtoken');
|
||||
const { isEnabled } = require('@librechat/api');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { getAppConfig } = require('~/server/services/Config/app');
|
||||
|
||||
const OBJECT_ID_LENGTH = 24;
|
||||
const OBJECT_ID_PATTERN = /^[0-9a-f]{24}$/i;
|
||||
@@ -22,50 +22,129 @@ function isValidObjectId(id) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Middleware to validate image request.
|
||||
* Must be set by `secureImageLinks` via custom config file.
|
||||
* Validates a LibreChat refresh token
|
||||
* @param {string} refreshToken - The refresh token to validate
|
||||
* @returns {{valid: boolean, userId?: string, error?: string}} - Validation result
|
||||
*/
|
||||
async function validateImageRequest(req, res, next) {
|
||||
const appConfig = await getAppConfig({ role: req.user?.role });
|
||||
if (!appConfig.secureImageLinks) {
|
||||
return next();
|
||||
}
|
||||
|
||||
const refreshToken = req.headers.cookie ? cookies.parse(req.headers.cookie).refreshToken : null;
|
||||
if (!refreshToken) {
|
||||
logger.warn('[validateImageRequest] Refresh token not provided');
|
||||
return res.status(401).send('Unauthorized');
|
||||
}
|
||||
|
||||
let payload;
|
||||
function validateToken(refreshToken) {
|
||||
try {
|
||||
payload = jwt.verify(refreshToken, process.env.JWT_REFRESH_SECRET);
|
||||
const payload = jwt.verify(refreshToken, process.env.JWT_REFRESH_SECRET);
|
||||
|
||||
if (!isValidObjectId(payload.id)) {
|
||||
return { valid: false, error: 'Invalid User ID' };
|
||||
}
|
||||
|
||||
const currentTimeInSeconds = Math.floor(Date.now() / 1000);
|
||||
if (payload.exp < currentTimeInSeconds) {
|
||||
return { valid: false, error: 'Refresh token expired' };
|
||||
}
|
||||
|
||||
return { valid: true, userId: payload.id };
|
||||
} catch (err) {
|
||||
logger.warn('[validateImageRequest]', err);
|
||||
return res.status(403).send('Access Denied');
|
||||
}
|
||||
|
||||
if (!isValidObjectId(payload.id)) {
|
||||
logger.warn('[validateImageRequest] Invalid User ID');
|
||||
return res.status(403).send('Access Denied');
|
||||
}
|
||||
|
||||
const currentTimeInSeconds = Math.floor(Date.now() / 1000);
|
||||
if (payload.exp < currentTimeInSeconds) {
|
||||
logger.warn('[validateImageRequest] Refresh token expired');
|
||||
return res.status(403).send('Access Denied');
|
||||
}
|
||||
|
||||
const fullPath = decodeURIComponent(req.originalUrl);
|
||||
const pathPattern = new RegExp(`^/images/${payload.id}/[^/]+$`);
|
||||
|
||||
if (pathPattern.test(fullPath)) {
|
||||
logger.debug('[validateImageRequest] Image request validated');
|
||||
next();
|
||||
} else {
|
||||
logger.warn('[validateImageRequest] Invalid image path');
|
||||
res.status(403).send('Access Denied');
|
||||
logger.warn('[validateToken]', err);
|
||||
return { valid: false, error: 'Invalid token' };
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = validateImageRequest;
|
||||
/**
|
||||
* Factory to create the `validateImageRequest` middleware with configured secureImageLinks
|
||||
* @param {boolean} [secureImageLinks] - Whether secure image links are enabled
|
||||
*/
|
||||
function createValidateImageRequest(secureImageLinks) {
|
||||
if (!secureImageLinks) {
|
||||
return (_req, _res, next) => next();
|
||||
}
|
||||
/**
|
||||
* Middleware to validate image request.
|
||||
* Supports both LibreChat refresh tokens and OpenID JWT tokens.
|
||||
* Must be set by `secureImageLinks` via custom config file.
|
||||
*/
|
||||
return async function validateImageRequest(req, res, next) {
|
||||
try {
|
||||
const cookieHeader = req.headers.cookie;
|
||||
if (!cookieHeader) {
|
||||
logger.warn('[validateImageRequest] No cookies provided');
|
||||
return res.status(401).send('Unauthorized');
|
||||
}
|
||||
|
||||
const parsedCookies = cookies.parse(cookieHeader);
|
||||
const refreshToken = parsedCookies.refreshToken;
|
||||
|
||||
if (!refreshToken) {
|
||||
logger.warn('[validateImageRequest] Token not provided');
|
||||
return res.status(401).send('Unauthorized');
|
||||
}
|
||||
|
||||
const tokenProvider = parsedCookies.token_provider;
|
||||
let userIdForPath;
|
||||
|
||||
if (tokenProvider === 'openid' && isEnabled(process.env.OPENID_REUSE_TOKENS)) {
|
||||
const openidUserId = parsedCookies.openid_user_id;
|
||||
if (!openidUserId) {
|
||||
logger.warn('[validateImageRequest] No OpenID user ID cookie found');
|
||||
return res.status(403).send('Access Denied');
|
||||
}
|
||||
|
||||
const validationResult = validateToken(openidUserId);
|
||||
if (!validationResult.valid) {
|
||||
logger.warn(`[validateImageRequest] ${validationResult.error}`);
|
||||
return res.status(403).send('Access Denied');
|
||||
}
|
||||
userIdForPath = validationResult.userId;
|
||||
} else {
|
||||
const validationResult = validateToken(refreshToken);
|
||||
if (!validationResult.valid) {
|
||||
logger.warn(`[validateImageRequest] ${validationResult.error}`);
|
||||
return res.status(403).send('Access Denied');
|
||||
}
|
||||
userIdForPath = validationResult.userId;
|
||||
}
|
||||
|
||||
if (!userIdForPath) {
|
||||
logger.warn('[validateImageRequest] No user ID available for path validation');
|
||||
return res.status(403).send('Access Denied');
|
||||
}
|
||||
|
||||
const MAX_URL_LENGTH = 2048;
|
||||
if (req.originalUrl.length > MAX_URL_LENGTH) {
|
||||
logger.warn('[validateImageRequest] URL too long');
|
||||
return res.status(403).send('Access Denied');
|
||||
}
|
||||
|
||||
if (req.originalUrl.includes('\x00')) {
|
||||
logger.warn('[validateImageRequest] URL contains null byte');
|
||||
return res.status(403).send('Access Denied');
|
||||
}
|
||||
|
||||
let fullPath;
|
||||
try {
|
||||
fullPath = decodeURIComponent(req.originalUrl);
|
||||
} catch {
|
||||
logger.warn('[validateImageRequest] Invalid URL encoding');
|
||||
return res.status(403).send('Access Denied');
|
||||
}
|
||||
|
||||
const agentAvatarPattern = /^\/images\/[a-f0-9]{24}\/agent-[^/]*$/;
|
||||
if (agentAvatarPattern.test(fullPath)) {
|
||||
logger.debug('[validateImageRequest] Image request validated');
|
||||
return next();
|
||||
}
|
||||
|
||||
const escapedUserId = userIdForPath.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||
const pathPattern = new RegExp(`^/images/${escapedUserId}/[^/]+$`);
|
||||
|
||||
if (pathPattern.test(fullPath)) {
|
||||
logger.debug('[validateImageRequest] Image request validated');
|
||||
next();
|
||||
} else {
|
||||
logger.warn('[validateImageRequest] Invalid image path');
|
||||
res.status(403).send('Access Denied');
|
||||
}
|
||||
    } catch (error) {
      logger.error('[validateImageRequest] Error:', error);
      res.status(500).send('Internal Server Error');
    }
  };
}

module.exports = createValidateImageRequest;

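Because validateImageRequest is now a factory, callers resolve secureImageLinks once at startup and pass it in; when the flag is false the factory returns a pass-through middleware. The mounting sketch below is an assumption for illustration only; the actual route registration is not part of this diff:

const express = require('express');
const createValidateImageRequest = require('~/server/middleware/validateImageRequest');
const { getAppConfig } = require('~/server/services/Config');

async function mountImageRoutes(app) {
  const appConfig = await getAppConfig();
  const validateImageRequest = createValidateImageRequest(appConfig.secureImageLinks);
  // Mount path and directory are illustrative placeholders.
  app.use('/images', validateImageRequest, express.static('client/public/images'));
}
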
66	api/server/routes/admin/auth.js	Normal file
@@ -0,0 +1,66 @@
const express = require('express');
const passport = require('passport');
const { randomState } = require('openid-client');
const { createSetBalanceConfig } = require('@librechat/api');
const { loginController } = require('~/server/controllers/auth/LoginController');
const { createOAuthHandler } = require('~/server/controllers/auth/oauth');
const { getAppConfig } = require('~/server/services/Config');
const { getOpenIdConfig } = require('~/strategies');
const middleware = require('~/server/middleware');
const { Balance } = require('~/db/models');

const setBalanceConfig = createSetBalanceConfig({
  getAppConfig,
  Balance,
});

const router = express.Router();

router.post(
  '/login/local',
  middleware.logHeaders,
  middleware.loginLimiter,
  middleware.checkBan,
  middleware.requireLocalAuth,
  middleware.requireAdmin,
  setBalanceConfig,
  loginController,
);

router.get('/verify', middleware.requireJwtAuth, middleware.requireAdmin, (req, res) => {
  const { password: _p, totpSecret: _t, __v, ...user } = req.user;
  user.id = user._id.toString();
  res.status(200).json({ user });
});

router.get('/oauth/openid/check', (req, res) => {
  const openidConfig = getOpenIdConfig();
  if (!openidConfig) {
    return res.status(404).json({ message: 'OpenID configuration not found' });
  }
  res.status(200).json({ message: 'OpenID check successful' });
});

router.get('/oauth/openid', (req, res, next) => {
  return passport.authenticate('openidAdmin', {
    session: false,
    state: randomState(),
  })(req, res, next);
});

router.get(
  '/oauth/openid/callback',
  passport.authenticate('openidAdmin', {
    failureRedirect: `${process.env.DOMAIN_CLIENT}/oauth/error`,
    failureMessage: true,
    session: false,
  }),
  middleware.requireAdmin,
  setBalanceConfig,
  middleware.checkDomainAllowed,
  createOAuthHandler(
    (process.env.ADMIN_PANEL_URL || 'http://localhost:3000') + '/auth/openid/callback',
  ),
);

module.exports = router;

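The router is exported as adminAuth from routes/index.js in a later hunk; how it is mounted on the server is not part of this diff, so the path below is hypothetical:

const routes = require('~/server/routes');

// Hypothetical mount point for the admin auth routes
app.use('/api/admin/auth', routes.adminAuth);
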
@@ -122,7 +122,6 @@ router.get('/', async function (req, res) {
|
||||
payload.minPasswordLength = minPasswordLength;
|
||||
}
|
||||
|
||||
payload.mcpServers = {};
|
||||
const getMCPServers = () => {
|
||||
try {
|
||||
if (appConfig?.mcpConfig == null) {
|
||||
@@ -136,6 +135,9 @@ router.get('/', async function (req, res) {
|
||||
if (!mcpServers) return;
|
||||
const oauthServers = mcpManager.getOAuthServers();
|
||||
for (const serverName in mcpServers) {
|
||||
if (!payload.mcpServers) {
|
||||
payload.mcpServers = {};
|
||||
}
|
||||
const serverConfig = mcpServers[serverName];
|
||||
payload.mcpServers[serverName] = removeNullishValues({
|
||||
startup: serverConfig?.startup,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
const accessPermissions = require('./accessPermissions');
|
||||
const assistants = require('./assistants');
|
||||
const categories = require('./categories');
|
||||
const adminAuth = require('./admin/auth');
|
||||
const tokenizer = require('./tokenizer');
|
||||
const endpoints = require('./endpoints');
|
||||
const staticRoute = require('./static');
|
||||
@@ -32,6 +33,7 @@ module.exports = {
|
||||
mcp,
|
||||
edit,
|
||||
auth,
|
||||
adminAuth,
|
||||
keys,
|
||||
user,
|
||||
tags,
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
const { Router } = require('express');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { CacheKeys, Constants } = require('librechat-data-provider');
|
||||
const { MCPOAuthHandler, getUserMCPAuthMap } = require('@librechat/api');
|
||||
const { getMCPManager, getFlowStateManager, getOAuthReconnectionManager } = require('~/config');
|
||||
const { getMCPSetupData, getServerConnectionStatus } = require('~/server/services/MCP');
|
||||
const { findToken, updateToken, createToken, deleteTokens } = require('~/models');
|
||||
const { updateMCPUserTools } = require('~/server/services/Config/mcpToolsCache');
|
||||
const { getUserPluginAuthValue } = require('~/server/services/PluginService');
|
||||
const { CacheKeys, Constants } = require('librechat-data-provider');
|
||||
const { getMCPManager, getFlowStateManager } = require('~/config');
|
||||
const { reinitMCPServer } = require('~/server/services/Tools/mcp');
|
||||
const { requireJwtAuth } = require('~/server/middleware');
|
||||
const { findPluginAuthsByKeys } = require('~/models');
|
||||
@@ -144,6 +144,10 @@ router.get('/:serverName/oauth/callback', async (req, res) => {
|
||||
`[MCP OAuth] Successfully reconnected ${serverName} for user ${flowState.userId}`,
|
||||
);
|
||||
|
||||
// clear any reconnection attempts
|
||||
const oauthReconnectionManager = getOAuthReconnectionManager();
|
||||
oauthReconnectionManager.clearReconnection(flowState.userId, serverName);
|
||||
|
||||
const tools = await userConnection.fetchTools();
|
||||
await updateMCPUserTools({
|
||||
userId: flowState.userId,
|
||||
|
||||
@@ -4,10 +4,9 @@ const passport = require('passport');
|
||||
const { randomState } = require('openid-client');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { ErrorTypes } = require('librechat-data-provider');
|
||||
const { isEnabled, createSetBalanceConfig } = require('@librechat/api');
|
||||
const { checkDomainAllowed, loginLimiter, logHeaders, checkBan } = require('~/server/middleware');
|
||||
const { syncUserEntraGroupMemberships } = require('~/server/services/PermissionService');
|
||||
const { setAuthTokens, setOpenIDAuthTokens } = require('~/server/services/AuthService');
|
||||
const { createSetBalanceConfig } = require('@librechat/api');
|
||||
const { checkDomainAllowed, loginLimiter, logHeaders } = require('~/server/middleware');
|
||||
const { createOAuthHandler } = require('~/server/controllers/auth/oauth');
|
||||
const { getAppConfig } = require('~/server/services/Config');
|
||||
const { Balance } = require('~/db/models');
|
||||
|
||||
@@ -26,28 +25,7 @@ const domains = {
|
||||
router.use(logHeaders);
|
||||
router.use(loginLimiter);
|
||||
|
||||
const oauthHandler = async (req, res) => {
|
||||
try {
|
||||
await checkDomainAllowed(req, res);
|
||||
await checkBan(req, res);
|
||||
if (req.banned) {
|
||||
return;
|
||||
}
|
||||
if (
|
||||
req.user &&
|
||||
req.user.provider == 'openid' &&
|
||||
isEnabled(process.env.OPENID_REUSE_TOKENS) === true
|
||||
) {
|
||||
await syncUserEntraGroupMemberships(req.user, req.user.tokenset.access_token);
|
||||
setOpenIDAuthTokens(req.user.tokenset, res);
|
||||
} else {
|
||||
await setAuthTokens(req.user._id, res);
|
||||
}
|
||||
res.redirect(domains.client);
|
||||
} catch (err) {
|
||||
logger.error('Error in setting authentication tokens:', err);
|
||||
}
|
||||
};
|
||||
const oauthHandler = createOAuthHandler();
|
||||
|
||||
router.get('/error', (req, res) => {
|
||||
/** A single error message is pushed by passport when authentication fails. */
|
||||
@@ -79,6 +57,7 @@ router.get(
|
||||
scope: ['openid', 'profile', 'email'],
|
||||
}),
|
||||
setBalanceConfig,
|
||||
checkDomainAllowed,
|
||||
oauthHandler,
|
||||
);
|
||||
|
||||
@@ -104,6 +83,7 @@ router.get(
|
||||
profileFields: ['id', 'email', 'name'],
|
||||
}),
|
||||
setBalanceConfig,
|
||||
checkDomainAllowed,
|
||||
oauthHandler,
|
||||
);
|
||||
|
||||
@@ -125,6 +105,7 @@ router.get(
|
||||
session: false,
|
||||
}),
|
||||
setBalanceConfig,
|
||||
checkDomainAllowed,
|
||||
oauthHandler,
|
||||
);
|
||||
|
||||
@@ -148,6 +129,7 @@ router.get(
|
||||
scope: ['user:email', 'read:user'],
|
||||
}),
|
||||
setBalanceConfig,
|
||||
checkDomainAllowed,
|
||||
oauthHandler,
|
||||
);
|
||||
|
||||
@@ -171,6 +153,7 @@ router.get(
|
||||
scope: ['identify', 'email'],
|
||||
}),
|
||||
setBalanceConfig,
|
||||
checkDomainAllowed,
|
||||
oauthHandler,
|
||||
);
|
||||
|
||||
@@ -192,6 +175,7 @@ router.post(
|
||||
session: false,
|
||||
}),
|
||||
setBalanceConfig,
|
||||
checkDomainAllowed,
|
||||
oauthHandler,
|
||||
);
|
||||
|
||||
|
||||
@@ -49,6 +49,7 @@ const AppService = async () => {
|
||||
enabled: isEnabled(process.env.CHECK_BALANCE),
|
||||
startBalance: startBalance ? parseInt(startBalance, 10) : undefined,
|
||||
};
|
||||
const transactions = config.transactions ?? configDefaults.transactions;
|
||||
const imageOutputType = config?.imageOutputType ?? configDefaults.imageOutputType;
|
||||
|
||||
process.env.CDN_PROVIDER = fileStrategy;
|
||||
@@ -84,6 +85,7 @@ const AppService = async () => {
|
||||
memory,
|
||||
speech,
|
||||
balance,
|
||||
transactions,
|
||||
mcpConfig,
|
||||
webSearch,
|
||||
fileStrategy,
|
||||
|
||||
@@ -152,6 +152,7 @@ describe('AppService', () => {
|
||||
webSearch: expect.objectContaining({
|
||||
safeSearch: 1,
|
||||
jinaApiKey: '${JINA_API_KEY}',
|
||||
jinaApiUrl: '${JINA_API_URL}',
|
||||
cohereApiKey: '${COHERE_API_KEY}',
|
||||
serperApiKey: '${SERPER_API_KEY}',
|
||||
searxngApiKey: '${SEARXNG_API_KEY}',
|
||||
|
||||
@@ -3,12 +3,12 @@ const jwt = require('jsonwebtoken');
|
||||
const { webcrypto } = require('node:crypto');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { isEnabled, checkEmailConfig } = require('@librechat/api');
|
||||
const { SystemRoles, errorsToString } = require('librechat-data-provider');
|
||||
const { ErrorTypes, SystemRoles, errorsToString } = require('librechat-data-provider');
|
||||
const {
|
||||
findUser,
|
||||
findToken,
|
||||
createUser,
|
||||
updateUser,
|
||||
findToken,
|
||||
countUsers,
|
||||
getUserById,
|
||||
findSession,
|
||||
@@ -181,6 +181,14 @@ const registerUser = async (user, additionalData = {}) => {
|
||||
|
||||
let newUserId;
|
||||
try {
|
||||
const appConfig = await getAppConfig();
|
||||
if (!isEmailDomainAllowed(email, appConfig?.registration?.allowedDomains)) {
|
||||
const errorMessage =
|
||||
'The email address provided cannot be used. Please use a different email address.';
|
||||
logger.error(`[registerUser] [Registration not allowed] [Email: ${user.email}]`);
|
||||
return { status: 403, message: errorMessage };
|
||||
}
|
||||
|
||||
const existingUser = await findUser({ email }, 'email _id');
|
||||
|
||||
if (existingUser) {
|
||||
@@ -195,14 +203,6 @@ const registerUser = async (user, additionalData = {}) => {
|
||||
return { status: 200, message: genericVerificationMessage };
|
||||
}
|
||||
|
||||
const appConfig = await getAppConfig({ role: user.role });
|
||||
if (!isEmailDomainAllowed(email, appConfig?.registration?.allowedDomains)) {
|
||||
const errorMessage =
|
||||
'The email address provided cannot be used. Please use a different email address.';
|
||||
logger.error(`[registerUser] [Registration not allowed] [Email: ${user.email}]`);
|
||||
return { status: 403, message: errorMessage };
|
||||
}
|
||||
|
||||
//determine if this is the first registered user (not counting anonymous_user)
|
||||
const isFirstRegisteredUser = (await countUsers()) === 0;
|
||||
|
||||
@@ -252,6 +252,13 @@ const registerUser = async (user, additionalData = {}) => {
|
||||
*/
|
||||
const requestPasswordReset = async (req) => {
|
||||
const { email } = req.body;
|
||||
const appConfig = await getAppConfig();
|
||||
if (!isEmailDomainAllowed(email, appConfig?.registration?.allowedDomains)) {
|
||||
const error = new Error(ErrorTypes.AUTH_FAILED);
|
||||
error.code = ErrorTypes.AUTH_FAILED;
|
||||
error.message = 'Email domain not allowed';
|
||||
return error;
|
||||
}
|
||||
const user = await findUser({ email }, 'email _id');
|
||||
const emailEnabled = checkEmailConfig();
|
||||
|
||||
@@ -350,23 +357,18 @@ const resetPassword = async (userId, token, password) => {
|
||||
|
||||
/**
|
||||
* Set Auth Tokens
|
||||
*
|
||||
* @param {String | ObjectId} userId
|
||||
* @param {Object} res
|
||||
* @param {String} sessionId
|
||||
* @param {ServerResponse} res
|
||||
* @param {ISession | null} [session=null]
|
||||
* @returns
|
||||
*/
|
||||
const setAuthTokens = async (userId, res, sessionId = null) => {
|
||||
const setAuthTokens = async (userId, res, _session = null) => {
|
||||
try {
|
||||
const user = await getUserById(userId);
|
||||
const token = await generateToken(user);
|
||||
|
||||
let session;
|
||||
let session = _session;
|
||||
let refreshToken;
|
||||
let refreshTokenExpires;
|
||||
|
||||
if (sessionId) {
|
||||
session = await findSession({ sessionId: sessionId }, { lean: false });
|
||||
if (session && session._id && session.expiration != null) {
|
||||
refreshTokenExpires = session.expiration.getTime();
|
||||
refreshToken = await generateRefreshToken(session);
|
||||
} else {
|
||||
@@ -376,6 +378,9 @@ const setAuthTokens = async (userId, res, sessionId = null) => {
|
||||
refreshTokenExpires = session.expiration.getTime();
|
||||
}
|
||||
|
||||
const user = await getUserById(userId);
|
||||
const token = await generateToken(user);
|
||||
|
||||
res.cookie('refreshToken', refreshToken, {
|
||||
expires: new Date(refreshTokenExpires),
|
||||
httpOnly: true,
|
||||
@@ -402,9 +407,10 @@
 * @param {import('openid-client').TokenEndpointResponse & import('openid-client').TokenEndpointResponseHelpers} tokenset
 * - The tokenset object containing access and refresh tokens
 * @param {Object} res - response object
 * @param {string} [userId] - Optional MongoDB user ID for image path validation
 * @returns {String} - access token
 */
const setOpenIDAuthTokens = (tokenset, res) => {
const setOpenIDAuthTokens = (tokenset, res, userId) => {
  try {
    if (!tokenset) {
      logger.error('[setOpenIDAuthTokens] No tokenset found in request');
@@ -435,6 +441,18 @@ const setOpenIDAuthTokens = (tokenset, res) => {
      secure: isProduction,
      sameSite: 'strict',
    });
    if (userId && isEnabled(process.env.OPENID_REUSE_TOKENS)) {
      /** JWT-signed user ID cookie for image path validation when OPENID_REUSE_TOKENS is enabled */
      const signedUserId = jwt.sign({ id: userId }, process.env.JWT_REFRESH_SECRET, {
        expiresIn: expiryInMilliseconds / 1000,
      });
      res.cookie('openid_user_id', signedUserId, {
        expires: expirationDate,
        httpOnly: true,
        secure: isProduction,
        sameSite: 'strict',
      });
    }
    return tokenset.access_token;
  } catch (error) {
    logger.error('[setOpenIDAuthTokens] Error in setting authentication tokens:', error);

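The openid_user_id cookie exists so that validateImageRequest can recover a user ID when OpenID tokens are reused and no LibreChat refresh token is issued. A minimal sketch of the round-trip, assuming the same JWT_REFRESH_SECRET on both ends; the user variable and lifetime value are illustrative:

const jwt = require('jsonwebtoken');

// On login (setOpenIDAuthTokens): sign the MongoDB user ID into the cookie value.
const signedUserId = jwt.sign({ id: user._id.toString() }, process.env.JWT_REFRESH_SECRET, {
  expiresIn: 60 * 60, // seconds; illustrative only
});

// On an image request (validateToken): verify the cookie and use payload.id
// to check that the path matches /images/<payload.id>/<file>.
const payload = jwt.verify(signedUserId, process.env.JWT_REFRESH_SECRET);
console.log(payload.id);
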
@@ -452,7 +470,7 @@ const setOpenIDAuthTokens = (tokenset, res) => {
|
||||
const resendVerificationEmail = async (req) => {
|
||||
try {
|
||||
const { email } = req.body;
|
||||
await deleteTokens(email);
|
||||
await deleteTokens({ email });
|
||||
const user = await findUser({ email }, 'email _id name');
|
||||
|
||||
if (!user) {
|
||||
|
||||
@@ -4,6 +4,8 @@ const AppService = require('~/server/services/AppService');
const { setCachedTools } = require('./getCachedTools');
const getLogStores = require('~/cache/getLogStores');

const BASE_CONFIG_KEY = '_BASE_';

/**
 * Get the app configuration based on user context
 * @param {Object} [options]
@@ -14,8 +16,8 @@ const getLogStores = require('~/cache/getLogStores');
async function getAppConfig(options = {}) {
  const { role, refresh } = options;

  const cache = getLogStores(CacheKeys.CONFIG_STORE);
  const cacheKey = role ? `${CacheKeys.APP_CONFIG}:${role}` : CacheKeys.APP_CONFIG;
  const cache = getLogStores(CacheKeys.APP_CONFIG);
  const cacheKey = role ? role : BASE_CONFIG_KEY;

  if (!refresh) {
    const cached = await cache.get(cacheKey);
@@ -24,7 +26,7 @@ async function getAppConfig(options = {}) {
    }
  }

  let baseConfig = await cache.get(CacheKeys.APP_CONFIG);
  let baseConfig = await cache.get(BASE_CONFIG_KEY);
  if (!baseConfig) {
    logger.info('[getAppConfig] App configuration not initialized. Initializing AppService...');
    baseConfig = await AppService();
@@ -37,7 +39,7 @@ async function getAppConfig(options = {}) {
    await setCachedTools(baseConfig.availableTools, { isGlobal: true });
  }

    await cache.set(CacheKeys.APP_CONFIG, baseConfig);
    await cache.set(BASE_CONFIG_KEY, baseConfig);
  }

  // For now, return the base config

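The hunks above move app configs into their own APP_CONFIG cache store and simplify the key to the role name, falling back to the _BASE_ sentinel when no role is given. A small sketch of the key derivation under that scheme:

const BASE_CONFIG_KEY = '_BASE_';

/** Derives the cache key getAppConfig uses for a given role. */
function appConfigCacheKey(role) {
  return role ? role : BASE_CONFIG_KEY;
}

console.log(appConfigCacheKey('ADMIN')); // 'ADMIN'
console.log(appConfigCacheKey()); // '_BASE_'
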
@@ -119,10 +119,6 @@ https://www.librechat.ai/docs/configuration/stt_tts`);
|
||||
.filter((endpoint) => endpoint.customParams)
|
||||
.forEach((endpoint) => parseCustomParams(endpoint.name, endpoint.customParams));
|
||||
|
||||
if (customConfig.cache) {
|
||||
const cache = getLogStores(CacheKeys.STATIC_CONFIG);
|
||||
await cache.set(CacheKeys.LIBRECHAT_YAML_CONFIG, customConfig);
|
||||
}
|
||||
|
||||
if (result.data.modelSpecs) {
|
||||
customConfig.modelSpecs = result.data.modelSpecs;
|
||||
|
||||
@@ -48,16 +48,11 @@ const axios = require('axios');
|
||||
const { loadYaml } = require('@librechat/api');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const loadCustomConfig = require('./loadCustomConfig');
|
||||
const getLogStores = require('~/cache/getLogStores');
|
||||
|
||||
describe('loadCustomConfig', () => {
|
||||
const mockSet = jest.fn();
|
||||
const mockCache = { set: mockSet };
|
||||
|
||||
beforeEach(() => {
|
||||
jest.resetAllMocks();
|
||||
delete process.env.CONFIG_PATH;
|
||||
getLogStores.mockReturnValue(mockCache);
|
||||
});
|
||||
|
||||
it('should return null and log error if remote config fetch fails', async () => {
|
||||
@@ -94,7 +89,6 @@ describe('loadCustomConfig', () => {
|
||||
const result = await loadCustomConfig();
|
||||
|
||||
expect(result).toEqual(mockConfig);
|
||||
expect(mockSet).toHaveBeenCalledWith(expect.anything(), mockConfig);
|
||||
});
|
||||
|
||||
it('should return null and log if config schema validation fails', async () => {
|
||||
@@ -134,7 +128,6 @@ describe('loadCustomConfig', () => {
|
||||
axios.get.mockResolvedValue({ data: mockConfig });
|
||||
const result = await loadCustomConfig();
|
||||
expect(result).toEqual(mockConfig);
|
||||
expect(mockSet).toHaveBeenCalledWith(expect.anything(), mockConfig);
|
||||
});
|
||||
|
||||
it('should return null if the remote config file is not found', async () => {
|
||||
@@ -168,7 +161,6 @@ describe('loadCustomConfig', () => {
|
||||
process.env.CONFIG_PATH = 'validConfig.yaml';
|
||||
loadYaml.mockReturnValueOnce(mockConfig);
|
||||
await loadCustomConfig();
|
||||
expect(mockSet).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should log the loaded custom config', async () => {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
const { Providers } = require('@librechat/agents');
|
||||
const {
|
||||
primeResources,
|
||||
getModelMaxTokens,
|
||||
extractLibreChatParams,
|
||||
optionalChainWithEmptyCheck,
|
||||
} = require('@librechat/api');
|
||||
@@ -17,7 +18,6 @@ const { getProviderConfig } = require('~/server/services/Endpoints');
|
||||
const { processFiles } = require('~/server/services/Files/process');
|
||||
const { getFiles, getToolFilesByIds } = require('~/models/File');
|
||||
const { getConvoFiles } = require('~/models/Conversation');
|
||||
const { getModelMaxTokens } = require('~/utils');
|
||||
|
||||
/**
|
||||
* @param {object} params
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
const { getLLMConfig } = require('@librechat/api');
|
||||
const { EModelEndpoint } = require('librechat-data-provider');
|
||||
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
|
||||
const { getLLMConfig } = require('~/server/services/Endpoints/anthropic/llm');
|
||||
const AnthropicClient = require('~/app/clients/AnthropicClient');
|
||||
|
||||
const initializeClient = async ({ req, res, endpointOption, overrideModel, optionsOnly }) => {
|
||||
@@ -40,7 +40,6 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio
|
||||
clientOptions = Object.assign(
|
||||
{
|
||||
proxy: PROXY ?? null,
|
||||
userId: req.user.id,
|
||||
reverseProxyUrl: ANTHROPIC_REVERSE_PROXY ?? null,
|
||||
modelOptions: endpointOption?.model_parameters ?? {},
|
||||
},
|
||||
@@ -49,6 +48,7 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio
|
||||
if (overrideModel) {
|
||||
clientOptions.modelOptions.model = overrideModel;
|
||||
}
|
||||
clientOptions.modelOptions.user = req.user.id;
|
||||
return getLLMConfig(anthropicApiKey, clientOptions);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,103 +0,0 @@
|
||||
const { ProxyAgent } = require('undici');
|
||||
const { anthropicSettings, removeNullishValues } = require('librechat-data-provider');
|
||||
const { checkPromptCacheSupport, getClaudeHeaders, configureReasoning } = require('./helpers');
|
||||
|
||||
/**
|
||||
* Generates configuration options for creating an Anthropic language model (LLM) instance.
|
||||
*
|
||||
* @param {string} apiKey - The API key for authentication with Anthropic.
|
||||
* @param {Object} [options={}] - Additional options for configuring the LLM.
|
||||
* @param {Object} [options.modelOptions] - Model-specific options.
|
||||
* @param {string} [options.modelOptions.model] - The name of the model to use.
|
||||
* @param {number} [options.modelOptions.maxOutputTokens] - The maximum number of tokens to generate.
|
||||
* @param {number} [options.modelOptions.temperature] - Controls randomness in output generation.
|
||||
* @param {number} [options.modelOptions.topP] - Controls diversity of output generation.
|
||||
* @param {number} [options.modelOptions.topK] - Controls the number of top tokens to consider.
|
||||
* @param {string[]} [options.modelOptions.stop] - Sequences where the API will stop generating further tokens.
|
||||
* @param {boolean} [options.modelOptions.stream] - Whether to stream the response.
|
||||
* @param {string} options.userId - The user ID for tracking and personalization.
|
||||
* @param {string} [options.proxy] - Proxy server URL.
|
||||
* @param {string} [options.reverseProxyUrl] - URL for a reverse proxy, if used.
|
||||
*
|
||||
* @returns {Object} Configuration options for creating an Anthropic LLM instance, with null and undefined values removed.
|
||||
*/
|
||||
function getLLMConfig(apiKey, options = {}) {
|
||||
const systemOptions = {
|
||||
thinking: options.modelOptions.thinking ?? anthropicSettings.thinking.default,
|
||||
promptCache: options.modelOptions.promptCache ?? anthropicSettings.promptCache.default,
|
||||
thinkingBudget: options.modelOptions.thinkingBudget ?? anthropicSettings.thinkingBudget.default,
|
||||
};
|
||||
for (let key in systemOptions) {
|
||||
delete options.modelOptions[key];
|
||||
}
|
||||
const defaultOptions = {
|
||||
model: anthropicSettings.model.default,
|
||||
maxOutputTokens: anthropicSettings.maxOutputTokens.default,
|
||||
stream: true,
|
||||
};
|
||||
|
||||
const mergedOptions = Object.assign(defaultOptions, options.modelOptions);
|
||||
|
||||
/** @type {AnthropicClientOptions} */
|
||||
let requestOptions = {
|
||||
apiKey,
|
||||
model: mergedOptions.model,
|
||||
stream: mergedOptions.stream,
|
||||
temperature: mergedOptions.temperature,
|
||||
stopSequences: mergedOptions.stop,
|
||||
maxTokens:
|
||||
mergedOptions.maxOutputTokens || anthropicSettings.maxOutputTokens.reset(mergedOptions.model),
|
||||
clientOptions: {},
|
||||
invocationKwargs: {
|
||||
metadata: {
|
||||
user_id: options.userId,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
requestOptions = configureReasoning(requestOptions, systemOptions);
|
||||
|
||||
if (!/claude-3[-.]7/.test(mergedOptions.model)) {
|
||||
requestOptions.topP = mergedOptions.topP;
|
||||
requestOptions.topK = mergedOptions.topK;
|
||||
} else if (requestOptions.thinking == null) {
|
||||
requestOptions.topP = mergedOptions.topP;
|
||||
requestOptions.topK = mergedOptions.topK;
|
||||
}
|
||||
|
||||
const supportsCacheControl =
|
||||
systemOptions.promptCache === true && checkPromptCacheSupport(requestOptions.model);
|
||||
const headers = getClaudeHeaders(requestOptions.model, supportsCacheControl);
|
||||
if (headers) {
|
||||
requestOptions.clientOptions.defaultHeaders = headers;
|
||||
}
|
||||
|
||||
if (options.proxy) {
|
||||
const proxyAgent = new ProxyAgent(options.proxy);
|
||||
requestOptions.clientOptions.fetchOptions = {
|
||||
dispatcher: proxyAgent,
|
||||
};
|
||||
}
|
||||
|
||||
if (options.reverseProxyUrl) {
|
||||
requestOptions.clientOptions.baseURL = options.reverseProxyUrl;
|
||||
requestOptions.anthropicApiUrl = options.reverseProxyUrl;
|
||||
}
|
||||
|
||||
const tools = [];
|
||||
|
||||
if (mergedOptions.web_search) {
|
||||
tools.push({
|
||||
type: 'web_search_20250305',
|
||||
name: 'web_search',
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
tools,
|
||||
/** @type {AnthropicClientOptions} */
|
||||
llmConfig: removeNullishValues(requestOptions),
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = { getLLMConfig };
|
||||
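The local Anthropic llm.js is deleted because getLLMConfig now ships with @librechat/api, as the initializeClient hunk above shows; the call site keeps the same shape:

const { getLLMConfig } = require('@librechat/api');

// clientOptions carries proxy, reverseProxyUrl, userId, and modelOptions,
// exactly as assembled in initializeClient above.
const { llmConfig, tools } = getLLMConfig(anthropicApiKey, clientOptions);
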
@@ -1,341 +0,0 @@
|
||||
const { getLLMConfig } = require('~/server/services/Endpoints/anthropic/llm');
|
||||
|
||||
jest.mock('https-proxy-agent', () => ({
|
||||
HttpsProxyAgent: jest.fn().mockImplementation((proxy) => ({ proxy })),
|
||||
}));
|
||||
|
||||
describe('getLLMConfig', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should create a basic configuration with default values', () => {
|
||||
const result = getLLMConfig('test-api-key', { modelOptions: {} });
|
||||
|
||||
expect(result.llmConfig).toHaveProperty('apiKey', 'test-api-key');
|
||||
expect(result.llmConfig).toHaveProperty('model', 'claude-3-5-sonnet-latest');
|
||||
expect(result.llmConfig).toHaveProperty('stream', true);
|
||||
expect(result.llmConfig).toHaveProperty('maxTokens');
|
||||
});
|
||||
|
||||
it('should include proxy settings when provided', () => {
|
||||
const result = getLLMConfig('test-api-key', {
|
||||
modelOptions: {},
|
||||
proxy: 'http://proxy:8080',
|
||||
});
|
||||
|
||||
expect(result.llmConfig.clientOptions).toHaveProperty('fetchOptions');
|
||||
expect(result.llmConfig.clientOptions.fetchOptions).toHaveProperty('dispatcher');
|
||||
expect(result.llmConfig.clientOptions.fetchOptions.dispatcher).toBeDefined();
|
||||
expect(result.llmConfig.clientOptions.fetchOptions.dispatcher.constructor.name).toBe(
|
||||
'ProxyAgent',
|
||||
);
|
||||
});
|
||||
|
||||
it('should include reverse proxy URL when provided', () => {
|
||||
const result = getLLMConfig('test-api-key', {
|
||||
modelOptions: {},
|
||||
reverseProxyUrl: 'http://reverse-proxy',
|
||||
});
|
||||
|
||||
expect(result.llmConfig.clientOptions).toHaveProperty('baseURL', 'http://reverse-proxy');
|
||||
expect(result.llmConfig).toHaveProperty('anthropicApiUrl', 'http://reverse-proxy');
|
||||
});
|
||||
|
||||
it('should include topK and topP for non-Claude-3.7 models', () => {
|
||||
const result = getLLMConfig('test-api-key', {
|
||||
modelOptions: {
|
||||
model: 'claude-3-opus',
|
||||
topK: 10,
|
||||
topP: 0.9,
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.llmConfig).toHaveProperty('topK', 10);
|
||||
expect(result.llmConfig).toHaveProperty('topP', 0.9);
|
||||
});
|
||||
|
||||
it('should include topK and topP for Claude-3.5 models', () => {
|
||||
const result = getLLMConfig('test-api-key', {
|
||||
modelOptions: {
|
||||
model: 'claude-3-5-sonnet',
|
||||
topK: 10,
|
||||
topP: 0.9,
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.llmConfig).toHaveProperty('topK', 10);
|
||||
expect(result.llmConfig).toHaveProperty('topP', 0.9);
|
||||
});
|
||||
|
||||
it('should NOT include topK and topP for Claude-3-7 models with thinking enabled (hyphen notation)', () => {
|
||||
const result = getLLMConfig('test-api-key', {
|
||||
modelOptions: {
|
||||
model: 'claude-3-7-sonnet',
|
||||
        topK: 10,
        topP: 0.9,
        thinking: true,
      },
    });

    expect(result.llmConfig).not.toHaveProperty('topK');
    expect(result.llmConfig).not.toHaveProperty('topP');
    expect(result.llmConfig).toHaveProperty('thinking');
    expect(result.llmConfig.thinking).toHaveProperty('type', 'enabled');
    // When thinking is enabled, it uses the default thinkingBudget of 2000
    expect(result.llmConfig.thinking).toHaveProperty('budget_tokens', 2000);
  });

  it('should add "prompt-caching" and "context-1m" beta headers for claude-sonnet-4 model', () => {
    const modelOptions = {
      model: 'claude-sonnet-4-20250514',
      promptCache: true,
    };
    const result = getLLMConfig('test-key', { modelOptions });
    const clientOptions = result.llmConfig.clientOptions;
    expect(clientOptions.defaultHeaders).toBeDefined();
    expect(clientOptions.defaultHeaders).toHaveProperty('anthropic-beta');
    expect(clientOptions.defaultHeaders['anthropic-beta']).toBe(
      'prompt-caching-2024-07-31,context-1m-2025-08-07',
    );
  });

  it('should add "prompt-caching" and "context-1m" beta headers for claude-sonnet-4 model formats', () => {
    const modelVariations = [
      'claude-sonnet-4-20250514',
      'claude-sonnet-4-latest',
      'anthropic/claude-sonnet-4-20250514',
    ];

    modelVariations.forEach((model) => {
      const modelOptions = { model, promptCache: true };
      const result = getLLMConfig('test-key', { modelOptions });
      const clientOptions = result.llmConfig.clientOptions;
      expect(clientOptions.defaultHeaders).toBeDefined();
      expect(clientOptions.defaultHeaders).toHaveProperty('anthropic-beta');
      expect(clientOptions.defaultHeaders['anthropic-beta']).toBe(
        'prompt-caching-2024-07-31,context-1m-2025-08-07',
      );
    });
  });

  it('should NOT include topK and topP for Claude-3.7 models with thinking enabled (decimal notation)', () => {
    const result = getLLMConfig('test-api-key', {
      modelOptions: {
        model: 'claude-3.7-sonnet',
        topK: 10,
        topP: 0.9,
        thinking: true,
      },
    });

    expect(result.llmConfig).not.toHaveProperty('topK');
    expect(result.llmConfig).not.toHaveProperty('topP');
    expect(result.llmConfig).toHaveProperty('thinking');
    expect(result.llmConfig.thinking).toHaveProperty('type', 'enabled');
    // When thinking is enabled, it uses the default thinkingBudget of 2000
    expect(result.llmConfig.thinking).toHaveProperty('budget_tokens', 2000);
  });

  it('should handle custom maxOutputTokens', () => {
    const result = getLLMConfig('test-api-key', {
      modelOptions: {
        model: 'claude-3-opus',
        maxOutputTokens: 2048,
      },
    });

    expect(result.llmConfig).toHaveProperty('maxTokens', 2048);
  });

  it('should handle promptCache setting', () => {
    const result = getLLMConfig('test-api-key', {
      modelOptions: {
        model: 'claude-3-5-sonnet',
        promptCache: true,
      },
    });

    // We're not checking specific header values since that depends on the actual helper function
    // Just verifying that the promptCache setting is processed
    expect(result.llmConfig).toBeDefined();
  });

  it('should include topK and topP for Claude-3.7 models when thinking is not enabled', () => {
    // Test with thinking explicitly set to null/undefined
    const result = getLLMConfig('test-api-key', {
      modelOptions: {
        model: 'claude-3-7-sonnet',
        topK: 10,
        topP: 0.9,
        thinking: false,
      },
    });

    expect(result.llmConfig).toHaveProperty('topK', 10);
    expect(result.llmConfig).toHaveProperty('topP', 0.9);

    // Test with thinking explicitly set to false
    const result2 = getLLMConfig('test-api-key', {
      modelOptions: {
        model: 'claude-3-7-sonnet',
        topK: 10,
        topP: 0.9,
        thinking: false,
      },
    });

    expect(result2.llmConfig).toHaveProperty('topK', 10);
    expect(result2.llmConfig).toHaveProperty('topP', 0.9);

    // Test with decimal notation as well
    const result3 = getLLMConfig('test-api-key', {
      modelOptions: {
        model: 'claude-3.7-sonnet',
        topK: 10,
        topP: 0.9,
        thinking: false,
      },
    });

    expect(result3.llmConfig).toHaveProperty('topK', 10);
    expect(result3.llmConfig).toHaveProperty('topP', 0.9);
  });

  describe('Edge cases', () => {
    it('should handle missing apiKey', () => {
      const result = getLLMConfig(undefined, { modelOptions: {} });
      expect(result.llmConfig).not.toHaveProperty('apiKey');
    });

    it('should handle empty modelOptions', () => {
      expect(() => {
        getLLMConfig('test-api-key', {});
      }).toThrow("Cannot read properties of undefined (reading 'thinking')");
    });

    it('should handle no options parameter', () => {
      expect(() => {
        getLLMConfig('test-api-key');
      }).toThrow("Cannot read properties of undefined (reading 'thinking')");
    });

    it('should handle temperature, stop sequences, and stream settings', () => {
      const result = getLLMConfig('test-api-key', {
        modelOptions: {
          temperature: 0.7,
          stop: ['\n\n', 'END'],
          stream: false,
        },
      });

      expect(result.llmConfig).toHaveProperty('temperature', 0.7);
      expect(result.llmConfig).toHaveProperty('stopSequences', ['\n\n', 'END']);
      expect(result.llmConfig).toHaveProperty('stream', false);
    });

    it('should handle maxOutputTokens when explicitly set to falsy value', () => {
      const result = getLLMConfig('test-api-key', {
        modelOptions: {
          model: 'claude-3-opus',
          maxOutputTokens: null,
        },
      });

      // The actual anthropicSettings.maxOutputTokens.reset('claude-3-opus') returns 4096
      expect(result.llmConfig).toHaveProperty('maxTokens', 4096);
    });

    it('should handle both proxy and reverseProxyUrl', () => {
      const result = getLLMConfig('test-api-key', {
        modelOptions: {},
        proxy: 'http://proxy:8080',
        reverseProxyUrl: 'https://reverse-proxy.com',
      });

      expect(result.llmConfig.clientOptions).toHaveProperty('fetchOptions');
      expect(result.llmConfig.clientOptions.fetchOptions).toHaveProperty('dispatcher');
      expect(result.llmConfig.clientOptions.fetchOptions.dispatcher).toBeDefined();
      expect(result.llmConfig.clientOptions.fetchOptions.dispatcher.constructor.name).toBe(
        'ProxyAgent',
      );
      expect(result.llmConfig.clientOptions).toHaveProperty('baseURL', 'https://reverse-proxy.com');
      expect(result.llmConfig).toHaveProperty('anthropicApiUrl', 'https://reverse-proxy.com');
    });

    it('should handle prompt cache with supported model', () => {
      const result = getLLMConfig('test-api-key', {
        modelOptions: {
          model: 'claude-3-5-sonnet',
          promptCache: true,
        },
      });

      // claude-3-5-sonnet supports prompt caching and should get the appropriate headers
      expect(result.llmConfig.clientOptions.defaultHeaders).toEqual({
        'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15,prompt-caching-2024-07-31',
      });
    });

    it('should handle thinking and thinkingBudget options', () => {
      const result = getLLMConfig('test-api-key', {
        modelOptions: {
          model: 'claude-3-7-sonnet',
          thinking: true,
          thinkingBudget: 10000, // This exceeds the default max_tokens of 8192
        },
      });

      // The function should add thinking configuration for claude-3-7 models
      expect(result.llmConfig).toHaveProperty('thinking');
      expect(result.llmConfig.thinking).toHaveProperty('type', 'enabled');
      // With claude-3-7-sonnet, the max_tokens default is 8192
      // Budget tokens gets adjusted to 90% of max_tokens (8192 * 0.9 = 7372) when it exceeds max_tokens
      expect(result.llmConfig.thinking).toHaveProperty('budget_tokens', 7372);

      // Test with budget_tokens within max_tokens limit
      const result2 = getLLMConfig('test-api-key', {
        modelOptions: {
          model: 'claude-3-7-sonnet',
          thinking: true,
          thinkingBudget: 2000,
        },
      });

      expect(result2.llmConfig.thinking).toHaveProperty('budget_tokens', 2000);
    });

    it('should remove system options from modelOptions', () => {
      const modelOptions = {
        model: 'claude-3-opus',
        thinking: true,
        promptCache: true,
        thinkingBudget: 1000,
        temperature: 0.5,
      };

      getLLMConfig('test-api-key', { modelOptions });

      expect(modelOptions).not.toHaveProperty('thinking');
      expect(modelOptions).not.toHaveProperty('promptCache');
      expect(modelOptions).not.toHaveProperty('thinkingBudget');
      expect(modelOptions).toHaveProperty('temperature', 0.5);
    });

    it('should handle all nullish values removal', () => {
      const result = getLLMConfig('test-api-key', {
        modelOptions: {
          temperature: null,
          topP: undefined,
          topK: 0,
          stop: [],
        },
      });

      expect(result.llmConfig).not.toHaveProperty('temperature');
      expect(result.llmConfig).not.toHaveProperty('topP');
      expect(result.llmConfig).toHaveProperty('topK', 0);
      expect(result.llmConfig).toHaveProperty('stopSequences', []);
    });
  });
});
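Taken together, these tests pin down how the Anthropic beta headers end up being assembled by `getLLMConfig`. A minimal sketch of the selection the assertions imply — `pickAnthropicBetaHeader` is a hypothetical name, and the final fallback branch is an assumption, not something the tests above verify:

```js
// Hypothetical helper; the real logic is internal to getLLMConfig's helpers.
function pickAnthropicBetaHeader(model, promptCache) {
  if (!promptCache) {
    return undefined;
  }
  if (/claude-sonnet-4/.test(model)) {
    // claude-sonnet-4* variants get prompt caching plus the 1M-context beta (asserted above)
    return 'prompt-caching-2024-07-31,context-1m-2025-08-07';
  }
  if (/claude-3-5-sonnet/.test(model)) {
    // asserted in the "prompt cache with supported model" test
    return 'max-tokens-3-5-sonnet-2024-07-15,prompt-caching-2024-07-31';
  }
  // Assumption: other cache-capable models only get the prompt-caching beta.
  return 'prompt-caching-2024-07-31';
}
```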
@@ -1,3 +1,4 @@
const { getModelMaxTokens } = require('@librechat/api');
const { createContentAggregator } = require('@librechat/agents');
const {
  EModelEndpoint,
@@ -7,7 +8,6 @@ const {
const { getDefaultHandlers } = require('~/server/controllers/agents/callbacks');
const getOptions = require('~/server/services/Endpoints/bedrock/options');
const AgentClient = require('~/server/controllers/agents/client');
const { getModelMaxTokens } = require('~/utils');

const initializeClient = async ({ req, res, endpointOption }) => {
  if (!endpointOption) {

@@ -159,9 +159,11 @@ class STTService {
   * Prepares the request for the OpenAI STT provider.
   * @param {Object} sttSchema - The STT schema for OpenAI.
   * @param {Stream} audioReadStream - The audio data to be transcribed.
   * @param {Object} audioFile - The audio file object (unused in OpenAI provider).
   * @param {string} language - The language code for the transcription.
   * @returns {Array} An array containing the URL, data, and headers for the request.
   */
  openAIProvider(sttSchema, audioReadStream) {
  openAIProvider(sttSchema, audioReadStream, audioFile, language) {
    const url = sttSchema?.url || 'https://api.openai.com/v1/audio/transcriptions';
    const apiKey = extractEnvVariable(sttSchema.apiKey) || '';

@@ -170,6 +172,12 @@ class STTService {
      model: sttSchema.model,
    };

    if (language) {
      /** Converted locale code (e.g., "en-US") to ISO-639-1 format (e.g., "en") */
      const isoLanguage = language.split('-')[0];
      data.language = isoLanguage;
    }

    const headers = {
      'Content-Type': 'multipart/form-data',
      ...(apiKey && { Authorization: `Bearer ${apiKey}` }),
@@ -184,10 +192,11 @@ class STTService {
   * @param {Object} sttSchema - The STT schema for Azure OpenAI.
   * @param {Buffer} audioBuffer - The audio data to be transcribed.
   * @param {Object} audioFile - The audio file object containing originalname, mimetype, and size.
   * @param {string} language - The language code for the transcription.
   * @returns {Array} An array containing the URL, data, and headers for the request.
   * @throws {Error} If the audio file size exceeds 25MB or the audio file format is not accepted.
   */
  azureOpenAIProvider(sttSchema, audioBuffer, audioFile) {
  azureOpenAIProvider(sttSchema, audioBuffer, audioFile, language) {
    const url = `${genAzureEndpoint({
      azureOpenAIApiInstanceName: extractEnvVariable(sttSchema?.instanceName),
      azureOpenAIApiDeploymentName: extractEnvVariable(sttSchema?.deploymentName),
@@ -211,6 +220,12 @@ class STTService {
      contentType: audioFile.mimetype,
    });

    if (language) {
      /** Converted locale code (e.g., "en-US") to ISO-639-1 format (e.g., "en") */
      const isoLanguage = language.split('-')[0];
      formData.append('language', isoLanguage);
    }

    const headers = {
      'Content-Type': 'multipart/form-data',
      ...(apiKey && { 'api-key': apiKey }),
@@ -229,10 +244,11 @@ class STTService {
   * @param {Object} requestData - The data required for the STT request.
   * @param {Buffer} requestData.audioBuffer - The audio data to be transcribed.
   * @param {Object} requestData.audioFile - The audio file object containing originalname, mimetype, and size.
   * @param {string} requestData.language - The language code for the transcription.
   * @returns {Promise<string>} A promise that resolves to the transcribed text.
   * @throws {Error} If the provider is invalid, the response status is not 200, or the response data is missing.
   */
  async sttRequest(provider, sttSchema, { audioBuffer, audioFile }) {
  async sttRequest(provider, sttSchema, { audioBuffer, audioFile, language }) {
    const strategy = this.providerStrategies[provider];
    if (!strategy) {
      throw new Error('Invalid provider');
@@ -243,7 +259,13 @@ class STTService {
    const audioReadStream = Readable.from(audioBuffer);
    audioReadStream.path = `audio.${fileExtension}`;

    const [url, data, headers] = strategy.call(this, sttSchema, audioReadStream, audioFile);
    const [url, data, headers] = strategy.call(
      this,
      sttSchema,
      audioReadStream,
      audioFile,
      language,
    );

    try {
      const response = await axios.post(url, data, { headers });
@@ -284,7 +306,8 @@ class STTService {

    try {
      const [provider, sttSchema] = await this.getProviderSchema(req);
      const text = await this.sttRequest(provider, sttSchema, { audioBuffer, audioFile });
      const language = req.body?.language || '';
      const text = await this.sttRequest(provider, sttSchema, { audioBuffer, audioFile, language });
      res.json({ text });
    } catch (error) {
      logger.error('An error occurred while processing the audio:', error);

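The `language` handling added to both providers reduces to trimming a BCP-47 locale to its ISO-639-1 prefix before it is attached to the transcription request. A standalone sketch of that conversion and its effect:

```js
// Mirrors the inline conversion added to openAIProvider/azureOpenAIProvider above.
function toIso6391(locale) {
  return typeof locale === 'string' && locale.length > 0 ? locale.split('-')[0] : undefined;
}

toIso6391('en-US'); // 'en' — sent as `data.language` / appended to the Azure form data
toIso6391('pt-BR'); // 'pt'
toIso6391('');      // undefined — the language field is simply omitted
```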
@@ -17,7 +17,7 @@ const { Files } = require('~/models');
 * @param {IUser} options.user - The user object
 * @param {AppConfig} options.appConfig - The app configuration object
 * @param {GraphRunnableConfig['configurable']} options.metadata - The metadata
 * @param {any} options.toolArtifact - The tool artifact containing structured data
 * @param {{ [Tools.file_search]: { sources: Object[]; fileCitations: boolean } }} options.toolArtifact - The tool artifact containing structured data
 * @param {string} options.toolCallId - The tool call ID
 * @returns {Promise<Object|null>} The file search attachment or null
 */
@@ -29,12 +29,14 @@ async function processFileCitations({ user, appConfig, toolArtifact, toolCallId,

  if (user) {
    try {
      const hasFileCitationsAccess = await checkAccess({
        user,
        permissionType: PermissionTypes.FILE_CITATIONS,
        permissions: [Permissions.USE],
        getRoleByName,
      });
      const hasFileCitationsAccess =
        toolArtifact?.[Tools.file_search]?.fileCitations ??
        (await checkAccess({
          user,
          permissionType: PermissionTypes.FILE_CITATIONS,
          permissions: [Permissions.USE],
          getRoleByName,
        }));

      if (!hasFileCitationsAccess) {
        logger.debug(
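The change above lets the tool artifact short-circuit the permission lookup: when the artifact already carries a `fileCitations` boolean, `checkAccess` is skipped entirely; only a missing value falls through to the role check. A minimal illustration of that `??` behaviour:

```js
// Nullish coalescing only falls back when the left-hand side is null or undefined.
false ?? 'fallback';     // false      -> artifact explicitly disables citations, no role check
true ?? 'fallback';      // true       -> artifact enables citations, no role check
undefined ?? 'fallback'; // 'fallback' -> no artifact flag, checkAccess() decides
```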
@@ -20,8 +20,8 @@ const {
  ContentTypes,
  isAssistantsEndpoint,
} = require('librechat-data-provider');
const { getMCPManager, getFlowStateManager, getOAuthReconnectionManager } = require('~/config');
const { findToken, createToken, updateToken } = require('~/models');
const { getMCPManager, getFlowStateManager } = require('~/config');
const { getCachedTools, getAppConfig } = require('./Config');
const { reinitMCPServer } = require('./Tools/mcp');
const { getLogStores } = require('~/cache');
@@ -538,13 +538,20 @@ async function getServerConnectionStatus(
  const baseConnectionState = getConnectionState();
  let finalConnectionState = baseConnectionState;

  // connection state overrides specific to OAuth servers
  if (baseConnectionState === 'disconnected' && oauthServers.has(serverName)) {
    const { hasActiveFlow, hasFailedFlow } = await checkOAuthFlowStatus(userId, serverName);

    if (hasFailedFlow) {
      finalConnectionState = 'error';
    } else if (hasActiveFlow) {
    // check if server is actively being reconnected
    const oauthReconnectionManager = getOAuthReconnectionManager();
    if (oauthReconnectionManager.isReconnecting(userId, serverName)) {
      finalConnectionState = 'connecting';
    } else {
      const { hasActiveFlow, hasFailedFlow } = await checkOAuthFlowStatus(userId, serverName);

      if (hasFailedFlow) {
        finalConnectionState = 'error';
      } else if (hasActiveFlow) {
        finalConnectionState = 'connecting';
      }
    }
  }

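Condensed, the new precedence for a disconnected OAuth server is: an active reconnection wins, then a failed flow, then an active flow. A short restatement of the branch above as a standalone sketch:

```js
// Same decision order as getServerConnectionStatus above, written as a small helper.
async function resolveOAuthConnectionState(userId, serverName) {
  if (getOAuthReconnectionManager().isReconnecting(userId, serverName)) {
    return 'connecting';
  }
  const { hasActiveFlow, hasFailedFlow } = await checkOAuthFlowStatus(userId, serverName);
  if (hasFailedFlow) {
    return 'error';
  }
  return hasActiveFlow ? 'connecting' : 'disconnected';
}
```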
@@ -31,6 +31,7 @@ jest.mock('./Config', () => ({
jest.mock('~/config', () => ({
  getMCPManager: jest.fn(),
  getFlowStateManager: jest.fn(),
  getOAuthReconnectionManager: jest.fn(),
}));

jest.mock('~/cache', () => ({
@@ -48,6 +49,7 @@ describe('tests for the new helper functions used by the MCP connection status e
  let mockGetMCPManager;
  let mockGetFlowStateManager;
  let mockGetLogStores;
  let mockGetOAuthReconnectionManager;

  beforeEach(() => {
    jest.clearAllMocks();
@@ -56,6 +58,7 @@ describe('tests for the new helper functions used by the MCP connection status e
    mockGetMCPManager = require('~/config').getMCPManager;
    mockGetFlowStateManager = require('~/config').getFlowStateManager;
    mockGetLogStores = require('~/cache').getLogStores;
    mockGetOAuthReconnectionManager = require('~/config').getOAuthReconnectionManager;
  });

  describe('getMCPSetupData', () => {
@@ -354,6 +357,12 @@ describe('tests for the new helper functions used by the MCP connection status e
      const userConnections = new Map();
      const oauthServers = new Set([mockServerName]);

      // Mock OAuthReconnectionManager
      const mockOAuthReconnectionManager = {
        isReconnecting: jest.fn(() => false),
      };
      mockGetOAuthReconnectionManager.mockReturnValue(mockOAuthReconnectionManager);

      const result = await getServerConnectionStatus(
        mockUserId,
        mockServerName,
@@ -370,6 +379,12 @@ describe('tests for the new helper functions used by the MCP connection status e
      const userConnections = new Map();
      const oauthServers = new Set([mockServerName]);

      // Mock OAuthReconnectionManager
      const mockOAuthReconnectionManager = {
        isReconnecting: jest.fn(() => false),
      };
      mockGetOAuthReconnectionManager.mockReturnValue(mockOAuthReconnectionManager);

      // Mock flow state to return failed flow
      const mockFlowManager = {
        getFlowState: jest.fn(() => ({
@@ -401,6 +416,12 @@ describe('tests for the new helper functions used by the MCP connection status e
      const userConnections = new Map();
      const oauthServers = new Set([mockServerName]);

      // Mock OAuthReconnectionManager
      const mockOAuthReconnectionManager = {
        isReconnecting: jest.fn(() => false),
      };
      mockGetOAuthReconnectionManager.mockReturnValue(mockOAuthReconnectionManager);

      // Mock flow state to return active flow
      const mockFlowManager = {
        getFlowState: jest.fn(() => ({
@@ -432,6 +453,12 @@ describe('tests for the new helper functions used by the MCP connection status e
      const userConnections = new Map();
      const oauthServers = new Set([mockServerName]);

      // Mock OAuthReconnectionManager
      const mockOAuthReconnectionManager = {
        isReconnecting: jest.fn(() => false),
      };
      mockGetOAuthReconnectionManager.mockReturnValue(mockOAuthReconnectionManager);

      // Mock flow state to return no flow
      const mockFlowManager = {
        getFlowState: jest.fn(() => null),
@@ -454,6 +481,35 @@ describe('tests for the new helper functions used by the MCP connection status e
      });
    });

    it('should return connecting state when OAuth server is reconnecting', async () => {
      const appConnections = new Map();
      const userConnections = new Map();
      const oauthServers = new Set([mockServerName]);

      // Mock OAuthReconnectionManager to return true for isReconnecting
      const mockOAuthReconnectionManager = {
        isReconnecting: jest.fn(() => true),
      };
      mockGetOAuthReconnectionManager.mockReturnValue(mockOAuthReconnectionManager);

      const result = await getServerConnectionStatus(
        mockUserId,
        mockServerName,
        appConnections,
        userConnections,
        oauthServers,
      );

      expect(result).toEqual({
        requiresOAuth: true,
        connectionState: 'connecting',
      });
      expect(mockOAuthReconnectionManager.isReconnecting).toHaveBeenCalledWith(
        mockUserId,
        mockServerName,
      );
    });

    it('should not check OAuth flow status when server is connected', async () => {
      const mockFlowManager = {
        getFlowState: jest.fn(),

@@ -1,13 +1,13 @@
const axios = require('axios');
const { Providers } = require('@librechat/agents');
const { logAxiosError } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { HttpsProxyAgent } = require('https-proxy-agent');
const { logAxiosError, inputSchema, processModelData } = require('@librechat/api');
const { EModelEndpoint, defaultModels, CacheKeys } = require('librechat-data-provider');
const { inputSchema, extractBaseURL, processModelData } = require('~/utils');
const { OllamaClient } = require('~/app/clients/OllamaClient');
const { isUserProvided } = require('~/server/utils');
const getLogStores = require('~/cache/getLogStores');
const { extractBaseURL } = require('~/utils');

/**
 * Splits a string by commas and trims each resulting value.

@@ -11,8 +11,8 @@ const {
  getAnthropicModels,
} = require('./ModelService');

jest.mock('~/utils', () => {
  const originalUtils = jest.requireActual('~/utils');
jest.mock('@librechat/api', () => {
  const originalUtils = jest.requireActual('@librechat/api');
  return {
    ...originalUtils,
    processModelData: jest.fn((...args) => {
@@ -108,7 +108,7 @@ describe('fetchModels with createTokenConfig true', () => {

  beforeEach(() => {
    // Clears the mock's history before each test
    const _utils = require('~/utils');
    const _utils = require('@librechat/api');
    axios.get.mockResolvedValue({ data });
  });

@@ -120,7 +120,7 @@ describe('fetchModels with createTokenConfig true', () => {
      createTokenConfig: true,
    });

    const { processModelData } = require('~/utils');
    const { processModelData } = require('@librechat/api');
    expect(processModelData).toHaveBeenCalled();
    expect(processModelData).toHaveBeenCalledWith(data);
  });

@@ -313,7 +313,7 @@ const ensurePrincipalExists = async function (principal) {
      idOnTheSource: principal.idOnTheSource,
    };

    const userId = await createUser(userData, true, false);
    const userId = await createUser(userData, true, true);
    return userId.toString();
  }

api/server/services/initializeOAuthReconnectManager.js (new file, 26 lines)
@@ -0,0 +1,26 @@
const { logger } = require('@librechat/data-schemas');
const { CacheKeys } = require('librechat-data-provider');
const { createOAuthReconnectionManager, getFlowStateManager } = require('~/config');
const { findToken, updateToken, createToken, deleteTokens } = require('~/models');
const { getLogStores } = require('~/cache');

/**
 * Initialize OAuth reconnect manager
 */
async function initializeOAuthReconnectManager() {
  try {
    const flowManager = getFlowStateManager(getLogStores(CacheKeys.FLOWS));
    const tokenMethods = {
      findToken,
      updateToken,
      createToken,
      deleteTokens,
    };
    await createOAuthReconnectionManager(flowManager, tokenMethods);
    logger.info(`OAuth reconnect manager initialized successfully.`);
  } catch (error) {
    logger.error('Failed to initialize OAuth reconnect manager:', error);
  }
}

module.exports = initializeOAuthReconnectManager;
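The new module only wires the flow manager and token methods together; its call site is not part of this compare view. A hedged sketch of how it would presumably be invoked during server startup:

```js
// Hypothetical startup wiring; the actual call site is not shown in this diff.
const initializeOAuthReconnectManager = require('~/server/services/initializeOAuthReconnectManager');

async function onServerStart() {
  // ...after the cache and flow-state managers are available
  await initializeOAuthReconnectManager();
}
```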
@@ -229,7 +229,7 @@
        >
          <!--[if mso]><style>.v-button {background: transparent !important;}</style><![endif]-->
          <div align='left'>
            <!--[if mso]><v:roundrect xmlns:v="urn:schemas-microsoft-com:vml" xmlns:w="urn:schemas-microsoft-com:office:word" href="href="{{verificationLink}}"" style="height:37px; v-text-anchor:middle; width:114px;" arcsize="11%" stroke="f" fillcolor="#10a37f"><w:anchorlock/><center style="color:#FFFFFF;"><![endif]-->
            <!--[if mso]><v:roundrect xmlns:v="urn:schemas-microsoft-com:vml" xmlns:w="urn:schemas-microsoft-com:office:word" href="{{verificationLink}}" style="height:37px; v-text-anchor:middle; width:114px;" arcsize="11%" stroke="f" fillcolor="#10a37f"><w:anchorlock/><center style="color:#FFFFFF;"><![endif]-->
            <a
              href='{{verificationLink}}'
              target='_blank'

@@ -1,9 +1,9 @@
const { v4: uuidv4 } = require('uuid');
const { logger } = require('@librechat/data-schemas');
const { EModelEndpoint, Constants, openAISettings, CacheKeys } = require('librechat-data-provider');
const { createImportBatchBuilder } = require('./importBatchBuilder');
const { cloneMessagesWithTimestamps } = require('./fork');
const getLogStores = require('~/cache/getLogStores');
const logger = require('~/config/winston');

/**
 * Returns the appropriate importer function based on the provided JSON data.
@@ -212,6 +212,29 @@ function processConversation(conv, importBatchBuilder, requestUserId) {
    }
  }

  /**
   * Helper function to find the nearest non-system parent
   * @param {string} parentId - The ID of the parent message.
   * @returns {string} The ID of the nearest non-system parent message.
   */
  const findNonSystemParent = (parentId) => {
    if (!parentId || !messageMap.has(parentId)) {
      return Constants.NO_PARENT;
    }

    const parentMapping = conv.mapping[parentId];
    if (!parentMapping?.message) {
      return Constants.NO_PARENT;
    }

    /* If parent is a system message, traverse up to find the nearest non-system parent */
    if (parentMapping.message.author?.role === 'system') {
      return findNonSystemParent(parentMapping.parent);
    }

    return messageMap.get(parentId);
  };

  // Create and save messages using the mapped IDs
  const messages = [];
  for (const [id, mapping] of Object.entries(conv.mapping)) {
@@ -220,23 +243,27 @@ function processConversation(conv, importBatchBuilder, requestUserId) {
      messageMap.delete(id);
      continue;
    } else if (role === 'system') {
      messageMap.delete(id);
      // Skip system messages but keep their ID in messageMap for parent references
      continue;
    }

    const newMessageId = messageMap.get(id);
    const parentMessageId =
      mapping.parent && messageMap.has(mapping.parent)
        ? messageMap.get(mapping.parent)
        : Constants.NO_PARENT;
    const parentMessageId = findNonSystemParent(mapping.parent);

    const messageText = formatMessageText(mapping.message);

    const isCreatedByUser = role === 'user';
    let sender = isCreatedByUser ? 'user' : 'GPT-3.5';
    let sender = isCreatedByUser ? 'user' : 'assistant';
    const model = mapping.message.metadata.model_slug || openAISettings.model.default;
    if (model.includes('gpt-4')) {
      sender = 'GPT-4';

    if (!isCreatedByUser) {
      /** Extracted model name from model slug */
      const gptMatch = model.match(/gpt-(.+)/i);
      if (gptMatch) {
        sender = `GPT-${gptMatch[1]}`;
      } else {
        sender = model || 'assistant';
      }
    }

    messages.push({

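The sender change above derives the display name from the model slug instead of hard-coding GPT-3.5/GPT-4. A small worked example of the extraction:

```js
// Mirrors the `gptMatch` logic added to processConversation above.
function senderFromSlug(modelSlug) {
  const gptMatch = (modelSlug || '').match(/gpt-(.+)/i);
  if (gptMatch) {
    return `GPT-${gptMatch[1]}`;
  }
  return modelSlug || 'assistant';
}

senderFromSlug('gpt-4o-mini');   // 'GPT-4o-mini'
senderFromSlug('claude-3-opus'); // 'claude-3-opus'
```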
@@ -99,6 +99,404 @@ describe('importChatGptConvo', () => {

    expect(importBatchBuilder.saveBatch).toHaveBeenCalled();
  });

  it('should handle system messages without breaking parent-child relationships', async () => {
    /**
     * Test data that reproduces message graph "breaking" when it encounters a system message
     */
    const testData = [
      {
        title: 'System Message Parent Test',
        create_time: 1714585031.148505,
        update_time: 1714585060.879308,
        mapping: {
          'root-node': {
            id: 'root-node',
            message: null,
            parent: null,
            children: ['user-msg-1'],
          },
          'user-msg-1': {
            id: 'user-msg-1',
            message: {
              id: 'user-msg-1',
              author: { role: 'user' },
              create_time: 1714585031.150442,
              content: { content_type: 'text', parts: ['First user message'] },
              metadata: { model_slug: 'gpt-4' },
            },
            parent: 'root-node',
            children: ['assistant-msg-1'],
          },
          'assistant-msg-1': {
            id: 'assistant-msg-1',
            message: {
              id: 'assistant-msg-1',
              author: { role: 'assistant' },
              create_time: 1714585032.150442,
              content: { content_type: 'text', parts: ['First assistant response'] },
              metadata: { model_slug: 'gpt-4' },
            },
            parent: 'user-msg-1',
            children: ['system-msg'],
          },
          'system-msg': {
            id: 'system-msg',
            message: {
              id: 'system-msg',
              author: { role: 'system' },
              create_time: 1714585033.150442,
              content: { content_type: 'text', parts: ['System message in middle'] },
              metadata: { model_slug: 'gpt-4' },
            },
            parent: 'assistant-msg-1',
            children: ['user-msg-2'],
          },
          'user-msg-2': {
            id: 'user-msg-2',
            message: {
              id: 'user-msg-2',
              author: { role: 'user' },
              create_time: 1714585034.150442,
              content: { content_type: 'text', parts: ['Second user message'] },
              metadata: { model_slug: 'gpt-4' },
            },
            parent: 'system-msg',
            children: ['assistant-msg-2'],
          },
          'assistant-msg-2': {
            id: 'assistant-msg-2',
            message: {
              id: 'assistant-msg-2',
              author: { role: 'assistant' },
              create_time: 1714585035.150442,
              content: { content_type: 'text', parts: ['Second assistant response'] },
              metadata: { model_slug: 'gpt-4' },
            },
            parent: 'user-msg-2',
            children: [],
          },
        },
      },
    ];

    const requestUserId = 'user-123';
    const importBatchBuilder = new ImportBatchBuilder(requestUserId);
    jest.spyOn(importBatchBuilder, 'saveMessage');

    const importer = getImporter(testData);
    await importer(testData, requestUserId, () => importBatchBuilder);

    /** 2 user messages + 2 assistant messages (system message should be skipped) */
    const expectedMessages = 4;
    expect(importBatchBuilder.saveMessage).toHaveBeenCalledTimes(expectedMessages);

    const savedMessages = importBatchBuilder.saveMessage.mock.calls.map((call) => call[0]);

    const messageMap = new Map();
    savedMessages.forEach((msg) => {
      messageMap.set(msg.text, msg);
    });

    const firstUser = messageMap.get('First user message');
    const firstAssistant = messageMap.get('First assistant response');
    const secondUser = messageMap.get('Second user message');
    const secondAssistant = messageMap.get('Second assistant response');

    expect(firstUser).toBeDefined();
    expect(firstAssistant).toBeDefined();
    expect(secondUser).toBeDefined();
    expect(secondAssistant).toBeDefined();
    expect(firstUser.parentMessageId).toBe(Constants.NO_PARENT);
    expect(firstAssistant.parentMessageId).toBe(firstUser.messageId);

    // This is the key test: second user message should have first assistant as parent
    // (not NO_PARENT which would indicate the system message broke the chain)
    expect(secondUser.parentMessageId).toBe(firstAssistant.messageId);
    expect(secondAssistant.parentMessageId).toBe(secondUser.messageId);
  });

  it('should maintain correct sender for user messages regardless of GPT-4 model', async () => {
    /**
     * Test data with GPT-4 model to ensure user messages keep 'user' sender
     */
    const testData = [
      {
        title: 'GPT-4 Sender Test',
        create_time: 1714585031.148505,
        update_time: 1714585060.879308,
        mapping: {
          'root-node': {
            id: 'root-node',
            message: null,
            parent: null,
            children: ['user-msg-1'],
          },
          'user-msg-1': {
            id: 'user-msg-1',
            message: {
              id: 'user-msg-1',
              author: { role: 'user' },
              create_time: 1714585031.150442,
              content: { content_type: 'text', parts: ['User message with GPT-4'] },
              metadata: { model_slug: 'gpt-4' },
            },
            parent: 'root-node',
            children: ['assistant-msg-1'],
          },
          'assistant-msg-1': {
            id: 'assistant-msg-1',
            message: {
              id: 'assistant-msg-1',
              author: { role: 'assistant' },
              create_time: 1714585032.150442,
              content: { content_type: 'text', parts: ['Assistant response with GPT-4'] },
              metadata: { model_slug: 'gpt-4' },
            },
            parent: 'user-msg-1',
            children: ['user-msg-2'],
          },
          'user-msg-2': {
            id: 'user-msg-2',
            message: {
              id: 'user-msg-2',
              author: { role: 'user' },
              create_time: 1714585033.150442,
              content: { content_type: 'text', parts: ['Another user message with GPT-4o-mini'] },
              metadata: { model_slug: 'gpt-4o-mini' },
            },
            parent: 'assistant-msg-1',
            children: ['assistant-msg-2'],
          },
          'assistant-msg-2': {
            id: 'assistant-msg-2',
            message: {
              id: 'assistant-msg-2',
              author: { role: 'assistant' },
              create_time: 1714585034.150442,
              content: { content_type: 'text', parts: ['Assistant response with GPT-3.5'] },
              metadata: { model_slug: 'gpt-3.5-turbo' },
            },
            parent: 'user-msg-2',
            children: [],
          },
        },
      },
    ];

    const requestUserId = 'user-123';
    const importBatchBuilder = new ImportBatchBuilder(requestUserId);
    jest.spyOn(importBatchBuilder, 'saveMessage');

    const importer = getImporter(testData);
    await importer(testData, requestUserId, () => importBatchBuilder);

    const savedMessages = importBatchBuilder.saveMessage.mock.calls.map((call) => call[0]);

    const userMsg1 = savedMessages.find((msg) => msg.text === 'User message with GPT-4');
    const assistantMsg1 = savedMessages.find((msg) => msg.text === 'Assistant response with GPT-4');
    const userMsg2 = savedMessages.find(
      (msg) => msg.text === 'Another user message with GPT-4o-mini',
    );
    const assistantMsg2 = savedMessages.find(
      (msg) => msg.text === 'Assistant response with GPT-3.5',
    );

    expect(userMsg1.sender).toBe('user');
    expect(userMsg1.isCreatedByUser).toBe(true);
    expect(userMsg1.model).toBe('gpt-4');

    expect(userMsg2.sender).toBe('user');
    expect(userMsg2.isCreatedByUser).toBe(true);
    expect(userMsg2.model).toBe('gpt-4o-mini');

    expect(assistantMsg1.sender).toBe('GPT-4');
    expect(assistantMsg1.isCreatedByUser).toBe(false);
    expect(assistantMsg1.model).toBe('gpt-4');

    expect(assistantMsg2.sender).toBe('GPT-3.5-turbo');
    expect(assistantMsg2.isCreatedByUser).toBe(false);
    expect(assistantMsg2.model).toBe('gpt-3.5-turbo');
  });

  it('should correctly extract and format model names from various model slugs', async () => {
    /**
     * Test data with various model slugs to test dynamic model identifier extraction
     */
    const testData = [
      {
        title: 'Dynamic Model Identifier Test',
        create_time: 1714585031.148505,
        update_time: 1714585060.879308,
        mapping: {
          'root-node': {
            id: 'root-node',
            message: null,
            parent: null,
            children: ['msg-1'],
          },
          'msg-1': {
            id: 'msg-1',
            message: {
              id: 'msg-1',
              author: { role: 'user' },
              create_time: 1714585031.150442,
              content: { content_type: 'text', parts: ['Test message'] },
              metadata: {},
            },
            parent: 'root-node',
            children: ['msg-2', 'msg-3', 'msg-4', 'msg-5', 'msg-6', 'msg-7', 'msg-8', 'msg-9'],
          },
          'msg-2': {
            id: 'msg-2',
            message: {
              id: 'msg-2',
              author: { role: 'assistant' },
              create_time: 1714585032.150442,
              content: { content_type: 'text', parts: ['GPT-4 response'] },
              metadata: { model_slug: 'gpt-4' },
            },
            parent: 'msg-1',
            children: [],
          },
          'msg-3': {
            id: 'msg-3',
            message: {
              id: 'msg-3',
              author: { role: 'assistant' },
              create_time: 1714585033.150442,
              content: { content_type: 'text', parts: ['GPT-4o response'] },
              metadata: { model_slug: 'gpt-4o' },
            },
            parent: 'msg-1',
            children: [],
          },
          'msg-4': {
            id: 'msg-4',
            message: {
              id: 'msg-4',
              author: { role: 'assistant' },
              create_time: 1714585034.150442,
              content: { content_type: 'text', parts: ['GPT-4o-mini response'] },
              metadata: { model_slug: 'gpt-4o-mini' },
            },
            parent: 'msg-1',
            children: [],
          },
          'msg-5': {
            id: 'msg-5',
            message: {
              id: 'msg-5',
              author: { role: 'assistant' },
              create_time: 1714585035.150442,
              content: { content_type: 'text', parts: ['GPT-3.5-turbo response'] },
              metadata: { model_slug: 'gpt-3.5-turbo' },
            },
            parent: 'msg-1',
            children: [],
          },
          'msg-6': {
            id: 'msg-6',
            message: {
              id: 'msg-6',
              author: { role: 'assistant' },
              create_time: 1714585036.150442,
              content: { content_type: 'text', parts: ['GPT-4-turbo response'] },
              metadata: { model_slug: 'gpt-4-turbo' },
            },
            parent: 'msg-1',
            children: [],
          },
          'msg-7': {
            id: 'msg-7',
            message: {
              id: 'msg-7',
              author: { role: 'assistant' },
              create_time: 1714585037.150442,
              content: { content_type: 'text', parts: ['GPT-4-1106-preview response'] },
              metadata: { model_slug: 'gpt-4-1106-preview' },
            },
            parent: 'msg-1',
            children: [],
          },
          'msg-8': {
            id: 'msg-8',
            message: {
              id: 'msg-8',
              author: { role: 'assistant' },
              create_time: 1714585038.150442,
              content: { content_type: 'text', parts: ['Claude response'] },
              metadata: { model_slug: 'claude-3-opus' },
            },
            parent: 'msg-1',
            children: [],
          },
          'msg-9': {
            id: 'msg-9',
            message: {
              id: 'msg-9',
              author: { role: 'assistant' },
              create_time: 1714585039.150442,
              content: { content_type: 'text', parts: ['No model slug response'] },
              metadata: {},
            },
            parent: 'msg-1',
            children: [],
          },
        },
      },
    ];

    const requestUserId = 'user-123';
    const importBatchBuilder = new ImportBatchBuilder(requestUserId);
    jest.spyOn(importBatchBuilder, 'saveMessage');

    const importer = getImporter(testData);
    await importer(testData, requestUserId, () => importBatchBuilder);

    const savedMessages = importBatchBuilder.saveMessage.mock.calls.map((call) => call[0]);

    // Test various GPT model slug formats
    const gpt4 = savedMessages.find((msg) => msg.text === 'GPT-4 response');
    expect(gpt4.sender).toBe('GPT-4');
    expect(gpt4.model).toBe('gpt-4');

    const gpt4o = savedMessages.find((msg) => msg.text === 'GPT-4o response');
    expect(gpt4o.sender).toBe('GPT-4o');
    expect(gpt4o.model).toBe('gpt-4o');

    const gpt4oMini = savedMessages.find((msg) => msg.text === 'GPT-4o-mini response');
    expect(gpt4oMini.sender).toBe('GPT-4o-mini');
    expect(gpt4oMini.model).toBe('gpt-4o-mini');

    const gpt35Turbo = savedMessages.find((msg) => msg.text === 'GPT-3.5-turbo response');
    expect(gpt35Turbo.sender).toBe('GPT-3.5-turbo');
    expect(gpt35Turbo.model).toBe('gpt-3.5-turbo');

    const gpt4Turbo = savedMessages.find((msg) => msg.text === 'GPT-4-turbo response');
    expect(gpt4Turbo.sender).toBe('GPT-4-turbo');
    expect(gpt4Turbo.model).toBe('gpt-4-turbo');

    const gpt4Preview = savedMessages.find((msg) => msg.text === 'GPT-4-1106-preview response');
    expect(gpt4Preview.sender).toBe('GPT-4-1106-preview');
    expect(gpt4Preview.model).toBe('gpt-4-1106-preview');

    // Test non-GPT model (should use the model slug as sender)
    const claude = savedMessages.find((msg) => msg.text === 'Claude response');
    expect(claude.sender).toBe('claude-3-opus');
    expect(claude.model).toBe('claude-3-opus');

    // Test missing model slug (should default to openAISettings.model.default)
    const noModel = savedMessages.find((msg) => msg.text === 'No model slug response');
    // When no model slug is provided, it defaults to gpt-4o-mini which gets formatted to GPT-4o-mini
    expect(noModel.sender).toBe('GPT-4o-mini');
    expect(noModel.model).toBe(openAISettings.model.default);

    // Verify user message is unaffected
    const userMsg = savedMessages.find((msg) => msg.text === 'Test message');
    expect(userMsg.sender).toBe('user');
    expect(userMsg.isCreatedByUser).toBe(true);
  });
});

describe('importLibreChatConvo', () => {

@@ -1,10 +1,10 @@
const jwt = require('jsonwebtoken');
const mongoose = require('mongoose');
const { isEnabled } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { Strategy: AppleStrategy } = require('passport-apple');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { createSocialUser, handleExistingUser } = require('./process');
const { isEnabled } = require('~/server/utils');
const socialLogin = require('./socialLogin');
const { findUser } = require('~/models');
const { User } = require('~/db/models');
@@ -17,6 +17,8 @@ jest.mock('@librechat/data-schemas', () => {
    logger: {
      error: jest.fn(),
      debug: jest.fn(),
      info: jest.fn(),
      warn: jest.fn(),
    },
  };
});
@@ -24,12 +26,19 @@ jest.mock('./process', () => ({
  createSocialUser: jest.fn(),
  handleExistingUser: jest.fn(),
}));
jest.mock('~/server/utils', () => ({
jest.mock('@librechat/api', () => ({
  ...jest.requireActual('@librechat/api'),
  isEnabled: jest.fn(),
}));
jest.mock('~/models', () => ({
  findUser: jest.fn(),
}));
jest.mock('~/server/services/Config', () => ({
  getAppConfig: jest.fn().mockResolvedValue({
    fileStrategy: 'local',
    balance: { enabled: false },
  }),
}));

describe('Apple Login Strategy', () => {
  let mongoServer;
@@ -288,7 +297,14 @@ describe('Apple Login Strategy', () => {

      expect(mockVerifyCallback).toHaveBeenCalledWith(null, existingUser);
      expect(existingUser.avatarUrl).toBeNull(); // As per getProfileDetails
      expect(handleExistingUser).toHaveBeenCalledWith(existingUser, null);
      expect(handleExistingUser).toHaveBeenCalledWith(
        existingUser,
        null,
        expect.objectContaining({
          fileStrategy: 'local',
          balance: { enabled: false },
        }),
      );
    });

    it('should handle missing idToken gracefully', async () => {

@@ -1,14 +1,14 @@
const appleLogin = require('./appleStrategy');
const { setupOpenId, getOpenIdConfig } = require('./openidStrategy');
const openIdJwtLogin = require('./openIdJwtStrategy');
const facebookLogin = require('./facebookStrategy');
const discordLogin = require('./discordStrategy');
const passportLogin = require('./localStrategy');
const googleLogin = require('./googleStrategy');
const githubLogin = require('./githubStrategy');
const discordLogin = require('./discordStrategy');
const facebookLogin = require('./facebookStrategy');
const { setupOpenId, getOpenIdConfig } = require('./openidStrategy');
const jwtLogin = require('./jwtStrategy');
const ldapLogin = require('./ldapStrategy');
const { setupSaml } = require('./samlStrategy');
const openIdJwtLogin = require('./openIdJwtStrategy');
const appleLogin = require('./appleStrategy');
const ldapLogin = require('./ldapStrategy');
const jwtLogin = require('./jwtStrategy');

module.exports = {
  appleLogin,

@@ -4,6 +4,7 @@ const { logger } = require('@librechat/data-schemas');
const { isEnabled, getBalanceConfig } = require('@librechat/api');
const { SystemRoles, ErrorTypes } = require('librechat-data-provider');
const { createUser, findUser, updateUser, countUsers } = require('~/models');
const { isEmailDomainAllowed } = require('~/server/services/domains');
const { getAppConfig } = require('~/server/services/Config');

const {
@@ -121,9 +122,18 @@ const ldapLogin = new LdapStrategy(ldapOptions, async (userinfo, done) => {
    );
  }

  const appConfig = await getAppConfig();
  if (!isEmailDomainAllowed(mail, appConfig?.registration?.allowedDomains)) {
    logger.error(
      `[LDAP Strategy] Authentication blocked - email domain not allowed [Email: ${mail}]`,
    );
    return done(null, false, { message: 'Email domain not allowed' });
  }

  if (!user) {
    const isFirstRegisteredUser = (await countUsers()) === 0;
    const role = isFirstRegisteredUser ? SystemRoles.ADMIN : SystemRoles.USER;

    user = {
      provider: 'ldap',
      ldapId,
@@ -133,7 +143,6 @@ const ldapLogin = new LdapStrategy(ldapOptions, async (userinfo, done) => {
      name: fullName,
      role,
    };
    const appConfig = await getAppConfig({ role: user?.role });
    const balanceConfig = getBalanceConfig(appConfig);
    const userId = await createUser(user, balanceConfig);
    user._id = userId;

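As with the other strategies, the LDAP login now resolves the app config up front and rejects the login before any user creation when the email domain is not on the allow list. An illustrative check (the domain list shown here is an example, not project configuration):

```js
// Illustrative only; allowedDomains comes from appConfig.registration in the real code.
const allowedDomains = ['example.com'];
isEmailDomainAllowed('jane@example.com', allowedDomains); // passes, login continues
isEmailDomainAllowed('jane@other.org', allowedDomains);   // fails, done(null, false, ...) is returned
```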
@@ -15,6 +15,7 @@ const {
  getBalanceConfig,
} = require('@librechat/api');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { isEmailDomainAllowed } = require('~/server/services/domains');
const { findUser, createUser, updateUser } = require('~/models');
const { getAppConfig } = require('~/server/services/Config');
const getLogStores = require('~/cache/getLogStores');
@@ -183,7 +184,7 @@ const getUserInfo = async (config, accessToken, sub) => {
    const exchangedAccessToken = await exchangeAccessTokenIfNeeded(config, accessToken, sub);
    return await client.fetchUserInfo(config, exchangedAccessToken, sub);
  } catch (error) {
    logger.warn(`[openidStrategy] getUserInfo: Error fetching user info: ${error}`);
    logger.error('[openidStrategy] getUserInfo: Error fetching user info:', error);
    return null;
  }
};
@@ -280,6 +281,221 @@ function convertToUsername(input, defaultValue = '') {
  return defaultValue;
}

/**
 * Process OpenID authentication tokenset and userinfo
 * This is the core logic extracted from the passport strategy callback
 * Can be reused by both the passport strategy and proxy authentication
 *
 * @param {Object} tokenset - The OpenID tokenset containing access_token, id_token, etc.
 * @param {boolean} existingUsersOnly - If true, only existing users will be processed
 * @returns {Promise<Object>} The authenticated user object with tokenset
 */
async function processOpenIDAuth(tokenset, existingUsersOnly = false) {
  const claims = tokenset.claims ? tokenset.claims() : tokenset;
  const userinfo = {
    ...claims,
  };

  // Get userinfo from provider if we have access_token and haven't already
  if (tokenset.access_token) {
    const providerUserinfo = await getUserInfo(openidConfig, tokenset.access_token, claims.sub);
    Object.assign(userinfo, providerUserinfo);
  }

  const appConfig = await getAppConfig();
  if (!isEmailDomainAllowed(userinfo.email, appConfig?.registration?.allowedDomains)) {
    logger.error(
      `[OpenID Auth] Authentication blocked - email domain not allowed [Email: ${userinfo.email}]`,
    );
    throw new Error('Email domain not allowed');
  }

  const result = await findOpenIDUser({
    openidId: claims.sub || userinfo.sub,
    email: claims.email || userinfo.email,
    strategyName: 'openidStrategy',
    findUser,
  });
  let user = result.user;
  const error = result.error;

  if (error) {
    throw new Error(ErrorTypes.AUTH_FAILED);
  }

  const fullName = getFullName(userinfo);

  /** Required role if configured */
  const requiredRole = process.env.OPENID_REQUIRED_ROLE;
  if (requiredRole) {
    const requiredRoleParameterPath = process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH;
    const requiredRoleTokenKind = process.env.OPENID_REQUIRED_ROLE_TOKEN_KIND;

    let decodedToken = '';
    if (requiredRoleTokenKind === 'access' && tokenset.access_token) {
      decodedToken = jwtDecode(tokenset.access_token);
    } else if (requiredRoleTokenKind === 'id' && tokenset.id_token) {
      decodedToken = jwtDecode(tokenset.id_token);
    } else if (userinfo.roles) {
      // If roles are already in userinfo, use them directly
      const roles = Array.isArray(userinfo.roles) ? userinfo.roles : [userinfo.roles];
      if (!roles.includes(requiredRole)) {
        throw new Error(`You must have the "${requiredRole}" role to log in.`);
      }
    } else if (requiredRoleParameterPath) {
      const pathParts = requiredRoleParameterPath.split('.');
      let found = true;
      let roles = pathParts.reduce((o, key) => {
        if (o === null || o === undefined || !(key in o)) {
          found = false;
          return [];
        }
        return o[key];
      }, decodedToken);

      if (!found) {
        logger.error(
          `[OpenID Auth] Key '${requiredRoleParameterPath}' not found in ${requiredRoleTokenKind} token!`,
        );
      }

      if (!roles.includes(requiredRole)) {
        throw new Error(`You must have the "${requiredRole}" role to log in.`);
      }
    }
  }

  let username = '';
  if (process.env.OPENID_USERNAME_CLAIM) {
    username = userinfo[process.env.OPENID_USERNAME_CLAIM];
  } else {
    username = convertToUsername(
      userinfo.preferred_username || userinfo.username || userinfo.email,
    );
  }

  if (existingUsersOnly && !user) {
    throw new Error('User does not exist');
  }

  if (!user) {
    user = {
      provider: 'openid',
      openidId: userinfo.sub,
      username,
      email: userinfo.email || '',
      emailVerified: userinfo.email_verified || false,
      name: fullName,
      idOnTheSource: userinfo.oid,
    };

    const balanceConfig = getBalanceConfig(appConfig);
    user = await createUser(user, balanceConfig, true, true);
  } else {
    user.provider = 'openid';
    user.openidId = userinfo.sub;
    user.username = username;
    user.name = fullName;
    user.idOnTheSource = userinfo.oid;
  }

  // Handle avatar
  if (!!userinfo && userinfo.picture && !user.avatar?.includes('manual=true')) {
    const imageUrl = userinfo.picture;
    let fileName;
    if (crypto) {
      fileName = (await hashToken(userinfo.sub)) + '.png';
    } else {
      fileName = userinfo.sub + '.png';
    }

    const imageBuffer = await downloadImage(
      imageUrl,
      openidConfig,
      tokenset.access_token,
      userinfo.sub,
    );
    if (imageBuffer) {
      const { saveBuffer } = getStrategyFunctions(
        appConfig?.fileStrategy ?? process.env.CDN_PROVIDER,
      );
      const imagePath = await saveBuffer({
        fileName,
        userId: user._id.toString(),
        buffer: imageBuffer,
      });
      user.avatar = imagePath ?? '';
    }
  }

  user = await updateUser(user._id, user);

  logger.info(
    `[OpenID Auth] login success openidId: ${user.openidId} | email: ${user.email} | username: ${user.username}`,
    {
      user: {
        openidId: user.openidId,
        username: user.username,
        email: user.email,
        name: user.name,
      },
    },
  );

  return { ...user, tokenset };
}

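Because the core logic now lives in `processOpenIDAuth`, it can back more than the passport strategy. A hedged sketch of how another caller (e.g., a proxy-auth path, which is an assumption and not part of this diff) might reuse it:

```js
// Hypothetical reuse outside passport; processOpenIDAuth is the function shown above.
async function authenticateFromProxy(tokenset) {
  try {
    // `true` restricts the flow to existing users, as the admin strategy below does.
    return await processOpenIDAuth(tokenset, true);
  } catch (err) {
    // Domain, role, and AUTH_FAILED violations all surface here as plain Errors.
    logger.warn(`[OpenID Auth] proxy authentication rejected: ${err.message}`);
    return null;
  }
}
```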
/**
 * @param {boolean | undefined} [existingUsersOnly]
 */
function createOpenIDCallback(existingUsersOnly) {
  return async (tokenset, done) => {
    try {
      const user = await processOpenIDAuth(tokenset, existingUsersOnly);
      done(null, user);
    } catch (err) {
      if (err.message === 'Email domain not allowed') {
        return done(null, false, { message: err.message });
      }
      if (err.message === ErrorTypes.AUTH_FAILED) {
        return done(null, false, { message: err.message });
      }
      if (err.message && err.message.includes('role to log in')) {
        return done(null, false, { message: err.message });
      }
      logger.error('[openidStrategy] login failed', err);
      done(err);
    }
  };
}

/**
 * Sets up the OpenID strategy specifically for admin authentication.
 * @param {Configuration} openidConfig
 */
const setupOpenIdAdmin = (openidConfig) => {
  try {
    if (!openidConfig) {
      throw new Error('OpenID configuration not initialized');
    }

    const openidAdminLogin = new CustomOpenIDStrategy(
      {
        config: openidConfig,
        scope: process.env.OPENID_SCOPE,
        usePKCE: isEnabled(process.env.OPENID_USE_PKCE),
        clockTolerance: process.env.OPENID_CLOCK_TOLERANCE || 300,
        callbackURL: process.env.DOMAIN_SERVER + '/api/admin/oauth/openid/callback',
      },
      createOpenIDCallback(true),
    );

    passport.use('openidAdmin', openidAdminLogin);
  } catch (err) {
    logger.error('[openidStrategy] setupOpenIdAdmin', err);
  }
};

/**
 * Sets up the OpenID strategy for authentication.
 * This function configures the OpenID client, handles proxy settings,
@@ -317,10 +533,6 @@ async function setupOpenId() {
|
||||
},
|
||||
);
|
||||
|
||||
const requiredRole = process.env.OPENID_REQUIRED_ROLE;
|
||||
const requiredRoleParameterPath = process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH;
|
||||
const requiredRoleTokenKind = process.env.OPENID_REQUIRED_ROLE_TOKEN_KIND;
|
||||
const usePKCE = isEnabled(process.env.OPENID_USE_PKCE);
|
||||
logger.info(`[openidStrategy] OpenID authentication configuration`, {
|
||||
generateNonce: shouldGenerateNonce,
|
||||
reason: shouldGenerateNonce
|
||||
@@ -334,148 +546,19 @@ async function setupOpenId() {
|
||||
scope: process.env.OPENID_SCOPE,
|
||||
callbackURL: process.env.DOMAIN_SERVER + process.env.OPENID_CALLBACK_URL,
|
||||
clockTolerance: process.env.OPENID_CLOCK_TOLERANCE || 300,
|
||||
usePKCE,
|
||||
},
|
||||
async (tokenset, done) => {
|
||||
try {
|
||||
const claims = tokenset.claims();
|
||||
const result = await findOpenIDUser({
|
||||
openidId: claims.sub,
|
||||
email: claims.email,
|
||||
strategyName: 'openidStrategy',
|
||||
findUser,
|
||||
});
|
||||
let user = result.user;
|
||||
const error = result.error;
|
||||
|
||||
if (error) {
|
||||
return done(null, false, {
|
||||
message: ErrorTypes.AUTH_FAILED,
|
||||
});
|
||||
}
|
||||
const userinfo = {
|
||||
...claims,
|
||||
...(await getUserInfo(openidConfig, tokenset.access_token, claims.sub)),
|
||||
};
|
||||
const fullName = getFullName(userinfo);
|
||||
|
||||
if (requiredRole) {
|
||||
let decodedToken = '';
|
||||
if (requiredRoleTokenKind === 'access') {
|
||||
decodedToken = jwtDecode(tokenset.access_token);
|
||||
} else if (requiredRoleTokenKind === 'id') {
|
||||
decodedToken = jwtDecode(tokenset.id_token);
|
||||
}
|
||||
const pathParts = requiredRoleParameterPath.split('.');
|
||||
let found = true;
|
||||
let roles = pathParts.reduce((o, key) => {
|
||||
if (o === null || o === undefined || !(key in o)) {
|
||||
found = false;
|
||||
return [];
|
||||
}
|
||||
return o[key];
|
||||
}, decodedToken);
|
||||
|
||||
if (!found) {
|
||||
logger.error(
|
||||
`[openidStrategy] Key '${requiredRoleParameterPath}' not found in ${requiredRoleTokenKind} token!`,
|
||||
);
|
||||
}
|
||||
|
||||
if (!roles.includes(requiredRole)) {
|
||||
return done(null, false, {
|
||||
message: `You must have the "${requiredRole}" role to log in.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let username = '';
|
||||
if (process.env.OPENID_USERNAME_CLAIM) {
|
||||
username = userinfo[process.env.OPENID_USERNAME_CLAIM];
|
||||
} else {
|
||||
username = convertToUsername(
|
||||
userinfo.preferred_username || userinfo.username || userinfo.email,
|
||||
);
|
||||
}
|
||||
|
||||
if (!user) {
|
||||
user = {
|
||||
provider: 'openid',
|
||||
openidId: userinfo.sub,
|
||||
username,
|
||||
email: userinfo.email || '',
|
||||
emailVerified: userinfo.email_verified || false,
|
||||
name: fullName,
|
||||
idOnTheSource: userinfo.oid,
|
||||
};
|
||||
|
||||
const appConfig = await getAppConfig();
|
||||
const balanceConfig = getBalanceConfig(appConfig);
|
||||
user = await createUser(user, balanceConfig, true, true);
|
||||
} else {
|
||||
user.provider = 'openid';
|
||||
user.openidId = userinfo.sub;
|
||||
user.username = username;
|
||||
user.name = fullName;
|
||||
user.idOnTheSource = userinfo.oid;
|
||||
}
|
||||
|
||||
if (!!userinfo && userinfo.picture && !user.avatar?.includes('manual=true')) {
|
||||
/** @type {string | undefined} */
|
||||
const imageUrl = userinfo.picture;
|
||||
|
||||
let fileName;
|
||||
if (crypto) {
|
||||
fileName = (await hashToken(userinfo.sub)) + '.png';
|
||||
} else {
|
||||
fileName = userinfo.sub + '.png';
|
||||
}
|
||||
|
||||
const imageBuffer = await downloadImage(
|
||||
imageUrl,
|
||||
openidConfig,
|
||||
tokenset.access_token,
|
||||
userinfo.sub,
|
||||
);
|
||||
if (imageBuffer) {
|
||||
const { saveBuffer } = getStrategyFunctions(process.env.CDN_PROVIDER);
|
||||
const imagePath = await saveBuffer({
|
||||
fileName,
|
||||
userId: user._id.toString(),
|
||||
buffer: imageBuffer,
|
||||
});
|
||||
user.avatar = imagePath ?? '';
|
||||
}
|
||||
}
|
||||
|
||||
user = await updateUser(user._id, user);
|
||||
|
||||
logger.info(
|
||||
`[openidStrategy] login success openidId: ${user.openidId} | email: ${user.email} | username: ${user.username} `,
|
||||
{
|
||||
user: {
|
||||
openidId: user.openidId,
|
||||
username: user.username,
|
||||
email: user.email,
|
||||
name: user.name,
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
done(null, { ...user, tokenset });
|
||||
} catch (err) {
|
||||
logger.error('[openidStrategy] login failed', err);
|
||||
done(err);
|
||||
}
|
||||
      usePKCE: isEnabled(process.env.OPENID_USE_PKCE),
      },
      createOpenIDCallback(),
    );
    passport.use('openid', openidLogin);
    setupOpenIdAdmin(openidConfig);
    return openidConfig;
  } catch (err) {
    logger.error('[openidStrategy]', err);
    return null;
  }
}

/**
 * @function getOpenIdConfig
 * @description Returns the OpenID client instance.

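The removed inline verify callback above pulled a required role out of a decoded access or ID token by walking the dot-separated path from OPENID_REQUIRED_ROLE_PARAMETER_PATH. A standalone sketch of that reduction; the sample payload values are illustrative only:

```ts
/** Walks a dot-separated path (e.g. 'realm_access.roles') through a decoded JWT payload. */
function getRolesAtPath(decodedToken: Record<string, unknown>, parameterPath: string): unknown[] {
  let found = true;
  const roles = parameterPath.split('.').reduce<unknown>((obj, key) => {
    if (obj === null || obj === undefined || typeof obj !== 'object' || !(key in obj)) {
      found = false;
      return [];
    }
    return (obj as Record<string, unknown>)[key];
  }, decodedToken);

  if (!found) {
    console.warn(`Key '${parameterPath}' not found in token`);
  }
  return Array.isArray(roles) ? roles : [];
}

// Example with a Keycloak-style payload (illustrative values):
const payload = { realm_access: { roles: ['admin', 'user'] } };
getRolesAtPath(payload, 'realm_access.roles').includes('admin'); // true
```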
@@ -3,7 +3,6 @@ const { FileSources } = require('librechat-data-provider');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { resizeAvatar } = require('~/server/services/Files/images/avatar');
const { updateUser, createUser, getUserById } = require('~/models');
const { getAppConfig } = require('~/server/services/Config');

/**
 * Updates the avatar URL of an existing user. If the user's avatar URL does not include the query parameter
@@ -12,14 +11,15 @@ const { getAppConfig } = require('~/server/services/Config');
 *
 * @param {IUser} oldUser - The existing user object that needs to be updated.
 * @param {string} avatarUrl - The new avatar URL to be set for the user.
 * @param {AppConfig} appConfig - The application configuration object.
 *
 * @returns {Promise<void>}
 * The function updates the user's avatar and saves the user object. It does not return any value.
 *
 * @throws {Error} Throws an error if there's an issue saving the updated user object.
 */
const handleExistingUser = async (oldUser, avatarUrl) => {
  const fileStrategy = process.env.CDN_PROVIDER;
const handleExistingUser = async (oldUser, avatarUrl, appConfig) => {
  const fileStrategy = appConfig?.fileStrategy ?? process.env.CDN_PROVIDER;
  const isLocal = fileStrategy === FileSources.local;

  let updatedAvatar = false;
@@ -56,6 +56,7 @@ const handleExistingUser = async (oldUser, avatarUrl) => {
 * @param {string} params.providerId - The provider-specific ID of the user.
 * @param {string} params.username - The username of the new user.
 * @param {string} params.name - The name of the new user.
 * @param {AppConfig} appConfig - The application configuration object.
 * @param {boolean} [params.emailVerified=false] - Optional. Indicates whether the user's email is verified. Defaults to false.
 *
 * @returns {Promise<User>}
@@ -71,6 +72,7 @@ const createSocialUser = async ({
  providerId,
  username,
  name,
  appConfig,
  emailVerified,
}) => {
  const update = {
@@ -83,10 +85,9 @@ const createSocialUser = async ({
    emailVerified,
  };

  const appConfig = await getAppConfig();
  const balanceConfig = getBalanceConfig(appConfig);
  const newUserId = await createUser(update, balanceConfig);
  const fileStrategy = process.env.CDN_PROVIDER;
  const fileStrategy = appConfig?.fileStrategy ?? process.env.CDN_PROVIDER;
  const isLocal = fileStrategy === FileSources.local;

  if (!isLocal) {

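The recurring change in this file (and the strategies below) is that the storage backend is now resolved from the loaded app config first, with the CDN_PROVIDER environment variable as a fallback. A small sketch of that fallback; the AppConfig shape is simplified for illustration:

```ts
// Sketch of the strategy-resolution fallback introduced in this change set.
interface AppConfig {
  fileStrategy?: string;
}

function resolveFileStrategy(appConfig?: AppConfig): string | undefined {
  return appConfig?.fileStrategy ?? process.env.CDN_PROVIDER;
}

// e.g. getStrategyFunctions(resolveFileStrategy(appConfig)) then yields the
// save/delete helpers for whichever storage backend is active.
```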
@@ -7,6 +7,7 @@ const { ErrorTypes } = require('librechat-data-provider');
const { hashToken, logger } = require('@librechat/data-schemas');
const { Strategy: SamlStrategy } = require('@node-saml/passport-saml');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { isEmailDomainAllowed } = require('~/server/services/domains');
const { findUser, createUser, updateUser } = require('~/models');
const { getAppConfig } = require('~/server/services/Config');
const paths = require('~/config/paths');
@@ -192,16 +193,25 @@ async function setupSaml() {
      logger.info(`[samlStrategy] SAML authentication received for NameID: ${profile.nameID}`);
      logger.debug('[samlStrategy] SAML profile:', profile);

      const userEmail = getEmail(profile) || '';
      const appConfig = await getAppConfig();

      if (!isEmailDomainAllowed(userEmail, appConfig?.registration?.allowedDomains)) {
        logger.error(
          `[SAML Strategy] Authentication blocked - email domain not allowed [Email: ${userEmail}]`,
        );
        return done(null, false, { message: 'Email domain not allowed' });
      }

      let user = await findUser({ samlId: profile.nameID });
      logger.info(
        `[samlStrategy] User ${user ? 'found' : 'not found'} with SAML ID: ${profile.nameID}`,
      );

      if (!user) {
        const email = getEmail(profile) || '';
        user = await findUser({ email });
        user = await findUser({ email: userEmail });
        logger.info(
          `[samlStrategy] User ${user ? 'found' : 'not found'} with email: ${profile.email}`,
          `[samlStrategy] User ${user ? 'found' : 'not found'} with email: ${userEmail}`,
        );
      }

@@ -225,11 +235,10 @@ async function setupSaml() {
          provider: 'saml',
          samlId: profile.nameID,
          username,
          email: getEmail(profile) || '',
          email: userEmail,
          emailVerified: true,
          name: fullName,
        };
        const appConfig = await getAppConfig();
        const balanceConfig = getBalanceConfig(appConfig);
        user = await createUser(user, balanceConfig, true, true);
      } else {
@@ -250,7 +259,9 @@ async function setupSaml() {
          fileName = profile.nameID + '.png';
        }

        const { saveBuffer } = getStrategyFunctions(process.env.CDN_PROVIDER);
        const { saveBuffer } = getStrategyFunctions(
          appConfig?.fileStrategy ?? process.env.CDN_PROVIDER,
        );
        const imagePath = await saveBuffer({
          fileName,
          userId: user._id.toString(),

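Both the SAML and social-login flows now reject sign-ins whose email domain is not on the configured allow list. The real isEmailDomainAllowed lives in ~/server/services/domains and is not shown in this diff; the sketch below is an assumed minimal version for illustration:

```ts
function isEmailDomainAllowed(email: string, allowedDomains?: string[]): boolean {
  if (!allowedDomains || allowedDomains.length === 0) {
    return true; // no restriction configured
  }
  const domain = email.split('@')[1]?.toLowerCase();
  if (!domain) {
    return false;
  }
  return allowedDomains.some((allowed) => allowed.toLowerCase() === domain);
}

isEmailDomainAllowed('jane@example.com', ['example.com']); // true
isEmailDomainAllowed('jane@other.org', ['example.com']); // false
```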
@@ -2,6 +2,8 @@ const { isEnabled } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { ErrorTypes } = require('librechat-data-provider');
const { createSocialUser, handleExistingUser } = require('./process');
const { isEmailDomainAllowed } = require('~/server/services/domains');
const { getAppConfig } = require('~/server/services/Config');
const { findUser } = require('~/models');

const socialLogin =
@@ -12,11 +14,22 @@ const socialLogin =
        profile,
      });

      const appConfig = await getAppConfig();

      if (!isEmailDomainAllowed(email, appConfig?.registration?.allowedDomains)) {
        logger.error(
          `[${provider}Login] Authentication blocked - email domain not allowed [Email: ${email}]`,
        );
        const error = new Error(ErrorTypes.AUTH_FAILED);
        error.code = ErrorTypes.AUTH_FAILED;
        error.message = 'Email domain not allowed';
        return cb(error);
      }

      const existingUser = await findUser({ email: email.trim() });
      const ALLOW_SOCIAL_REGISTRATION = isEnabled(process.env.ALLOW_SOCIAL_REGISTRATION);

      if (existingUser?.provider === provider) {
        await handleExistingUser(existingUser, avatarUrl);
        await handleExistingUser(existingUser, avatarUrl, appConfig);
        return cb(null, existingUser);
      } else if (existingUser) {
        logger.info(
@@ -28,19 +41,29 @@ const socialLogin =
        return cb(error);
      }

      if (ALLOW_SOCIAL_REGISTRATION) {
        const newUser = await createSocialUser({
          email,
          avatarUrl,
          provider,
          providerKey: `${provider}Id`,
          providerId: id,
          username,
          name,
          emailVerified,
        });
        return cb(null, newUser);
      const ALLOW_SOCIAL_REGISTRATION = isEnabled(process.env.ALLOW_SOCIAL_REGISTRATION);
      if (!ALLOW_SOCIAL_REGISTRATION) {
        logger.error(
          `[${provider}Login] Registration blocked - social registration is disabled [Email: ${email}]`,
        );
        const error = new Error(ErrorTypes.AUTH_FAILED);
        error.code = ErrorTypes.AUTH_FAILED;
        error.message = 'Social registration is disabled';
        return cb(error);
      }

      const newUser = await createSocialUser({
        email,
        avatarUrl,
        provider,
        providerKey: `${provider}Id`,
        providerId: id,
        username,
        name,
        emailVerified,
        appConfig,
      });
      return cb(null, newUser);
    } catch (err) {
      logger.error(`[${provider}Login]`, err);
      return cb(err);

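The social-login callback now fails fast when ALLOW_SOCIAL_REGISTRATION is off instead of silently falling through. The isEnabled helper it uses comes from @librechat/api; a rough sketch of such a boolean env-flag parser (the exact set of accepted values is an assumption here):

```ts
// Rough sketch of a boolean env-flag parser like the isEnabled() used above.
function isEnabled(value?: string | boolean): boolean {
  if (typeof value === 'boolean') {
    return value;
  }
  return ['true', '1', 'yes', 'on'].includes((value ?? '').trim().toLowerCase());
}

// Guarding registration the same way the callback above does:
const allowSocialRegistration = isEnabled(process.env.ALLOW_SOCIAL_REGISTRATION);
if (!allowSocialRegistration) {
  // reject new social sign-ups while still letting existing users log in
}
```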
@@ -873,6 +873,13 @@
 * @typedef {import('@librechat/data-schemas').IMongoFile} MongoFile
 * @memberof typedefs
 */

/**
 * @exports ISession
 * @typedef {import('@librechat/data-schemas').ISession} ISession
 * @memberof typedefs
 */

/**
 * @exports IBalance
 * @typedef {import('@librechat/data-schemas').IBalance} IBalance

@@ -1,7 +1,7 @@
const axios = require('axios');
const deriveBaseURL = require('./deriveBaseURL');
jest.mock('~/utils', () => {
  const originalUtils = jest.requireActual('~/utils');
jest.mock('@librechat/api', () => {
  const originalUtils = jest.requireActual('@librechat/api');
  return {
    ...originalUtils,
    processModelData: jest.fn((...args) => {

@@ -1,4 +1,3 @@
const tokenHelpers = require('./tokens');
const deriveBaseURL = require('./deriveBaseURL');
const extractBaseURL = require('./extractBaseURL');
const findMessageContent = require('./findMessageContent');
@@ -6,6 +5,5 @@ const findMessageContent = require('./findMessageContent');
module.exports = {
  deriveBaseURL,
  extractBaseURL,
  ...tokenHelpers,
  findMessageContent,
};

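These specs switch their partial mocks from the local ~/utils module to the @librechat/api package after the token helpers moved there. A minimal illustration of the same jest.mock + requireActual pattern; the mocked function name comes from the diff, the stubbed return value and test body are assumptions:

```ts
// Partially mock the package: keep real exports, stub only what the test controls.
jest.mock('@librechat/api', () => {
  const actual = jest.requireActual('@librechat/api');
  return {
    ...actual,
    processModelData: jest.fn(() => ({ 'gpt-4': { prompt: 30, completion: 60 } })),
  };
});

import { processModelData } from '@librechat/api';

test('uses the stubbed processModelData', () => {
  expect(processModelData()).toHaveProperty('gpt-4');
  expect(jest.mocked(processModelData)).toHaveBeenCalledTimes(1);
});
```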
@@ -1,12 +1,12 @@
const { EModelEndpoint } = require('librechat-data-provider');
const {
  maxTokensMap,
  matchModelName,
  processModelData,
  getModelMaxTokens,
  maxOutputTokensMap,
  findMatchingPattern,
  getModelMaxTokens,
  processModelData,
  matchModelName,
  maxTokensMap,
} = require('./tokens');
} = require('@librechat/api');

describe('getModelMaxTokens', () => {
  test('should return correct tokens for exact match', () => {
@@ -394,7 +394,7 @@ describe('getModelMaxTokens', () => {
  });

  test('should return correct max output tokens for GPT-5 models', () => {
    const { getModelMaxOutputTokens } = require('./tokens');
    const { getModelMaxOutputTokens } = require('@librechat/api');
    ['gpt-5', 'gpt-5-mini', 'gpt-5-nano'].forEach((model) => {
      expect(getModelMaxOutputTokens(model)).toBe(maxOutputTokensMap[EModelEndpoint.openAI][model]);
      expect(getModelMaxOutputTokens(model, EModelEndpoint.openAI)).toBe(
@@ -407,7 +407,7 @@ describe('getModelMaxTokens', () => {
  });

  test('should return correct max output tokens for GPT-OSS models', () => {
    const { getModelMaxOutputTokens } = require('./tokens');
    const { getModelMaxOutputTokens } = require('@librechat/api');
    ['gpt-oss-20b', 'gpt-oss-120b'].forEach((model) => {
      expect(getModelMaxOutputTokens(model)).toBe(maxOutputTokensMap[EModelEndpoint.openAI][model]);
      expect(getModelMaxOutputTokens(model, EModelEndpoint.openAI)).toBe(

@@ -1,3 +1,4 @@
/** v0.8.0-rc4 */
module.exports = {
  roots: ['<rootDir>/src'],
  testEnvironment: 'jsdom',
@@ -28,7 +29,8 @@ module.exports = {
    'jest-file-loader',
    '^test/(.*)$': '<rootDir>/test/$1',
    '^~/(.*)$': '<rootDir>/src/$1',
    '^librechat-data-provider/react-query$': '<rootDir>/../node_modules/librechat-data-provider/src/react-query',
    '^librechat-data-provider/react-query$':
      '<rootDir>/../node_modules/librechat-data-provider/src/react-query',
  },
  restoreMocks: true,
  testResultsProcessor: 'jest-junit',

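For reference, this is the same moduleNameMapper technique consolidated into a minimal config sketch: subpaths of the workspace data-provider package resolve to source so the client specs run without a build step. The mapping strings mirror the diff above; everything else is illustrative:

```ts
// jest.config.ts (sketch)
export default {
  testEnvironment: 'jsdom',
  moduleNameMapper: {
    '^~/(.*)$': '<rootDir>/src/$1',
    '^librechat-data-provider/react-query$':
      '<rootDir>/../node_modules/librechat-data-provider/src/react-query',
  },
};
```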
@@ -1,6 +1,6 @@
{
  "name": "@librechat/frontend",
  "version": "v0.8.0-rc3",
  "version": "v0.8.0-rc4",
  "description": "",
  "type": "module",
  "scripts": {
@@ -148,8 +148,8 @@
    "tailwindcss": "^3.4.1",
    "ts-jest": "^29.2.5",
    "typescript": "^5.3.3",
    "vite": "^6.3.4",
    "vite-plugin-compression2": "^1.3.3",
    "vite": "^6.3.6",
    "vite-plugin-compression2": "^2.2.1",
    "vite-plugin-node-polyfills": "^0.23.0",
    "vite-plugin-pwa": "^0.21.2"
  }

@@ -140,6 +140,7 @@ export function AgentPanelProvider({ children }: { children: React.ReactNode })
      pluginTools,
      activePanel,
      agentsConfig,
      startupConfig,
      setActivePanel,
      endpointsConfig,
      setCurrentAgentId,

@@ -1,19 +1,19 @@
|
||||
import React, { createContext, useContext, useEffect, useMemo, useRef } from 'react';
|
||||
import React, { createContext, useContext, useEffect, useRef } from 'react';
|
||||
import { useSetRecoilState } from 'recoil';
|
||||
import { Tools, Constants, LocalStorageKeys, AgentCapabilities } from 'librechat-data-provider';
|
||||
import type { TAgentsEndpoint } from 'librechat-data-provider';
|
||||
import {
|
||||
useMCPServerManager,
|
||||
useSearchApiKeyForm,
|
||||
useGetAgentsConfig,
|
||||
useCodeApiKeyForm,
|
||||
useGetMCPTools,
|
||||
useToolToggle,
|
||||
} from '~/hooks';
|
||||
import { getTimestampedValue, setTimestamp } from '~/utils/timestamps';
|
||||
import { ephemeralAgentByConvoId } from '~/store';
|
||||
|
||||
interface BadgeRowContextType {
|
||||
conversationId?: string | null;
|
||||
mcpServerNames?: string[] | null;
|
||||
agentsConfig?: TAgentsEndpoint | null;
|
||||
webSearch: ReturnType<typeof useToolToggle>;
|
||||
artifacts: ReturnType<typeof useToolToggle>;
|
||||
@@ -21,6 +21,7 @@ interface BadgeRowContextType {
|
||||
codeInterpreter: ReturnType<typeof useToolToggle>;
|
||||
codeApiKeyForm: ReturnType<typeof useCodeApiKeyForm>;
|
||||
searchApiKeyForm: ReturnType<typeof useSearchApiKeyForm>;
|
||||
mcpServerManager: ReturnType<typeof useMCPServerManager>;
|
||||
}
|
||||
|
||||
const BadgeRowContext = createContext<BadgeRowContextType | undefined>(undefined);
|
||||
@@ -46,7 +47,6 @@ export default function BadgeRowProvider({
|
||||
}: BadgeRowProviderProps) {
|
||||
const lastKeyRef = useRef<string>('');
|
||||
const hasInitializedRef = useRef(false);
|
||||
const { mcpToolDetails } = useGetMCPTools();
|
||||
const { agentsConfig } = useGetAgentsConfig();
|
||||
const key = conversationId ?? Constants.NEW_CONVO;
|
||||
|
||||
@@ -62,16 +62,15 @@ export default function BadgeRowProvider({
|
||||
hasInitializedRef.current = true;
|
||||
lastKeyRef.current = key;
|
||||
|
||||
// Load all localStorage values
|
||||
const codeToggleKey = `${LocalStorageKeys.LAST_CODE_TOGGLE_}${key}`;
|
||||
const webSearchToggleKey = `${LocalStorageKeys.LAST_WEB_SEARCH_TOGGLE_}${key}`;
|
||||
const fileSearchToggleKey = `${LocalStorageKeys.LAST_FILE_SEARCH_TOGGLE_}${key}`;
|
||||
const artifactsToggleKey = `${LocalStorageKeys.LAST_ARTIFACTS_TOGGLE_}${key}`;
|
||||
|
||||
const codeToggleValue = localStorage.getItem(codeToggleKey);
|
||||
const webSearchToggleValue = localStorage.getItem(webSearchToggleKey);
|
||||
const fileSearchToggleValue = localStorage.getItem(fileSearchToggleKey);
|
||||
const artifactsToggleValue = localStorage.getItem(artifactsToggleKey);
|
||||
const codeToggleValue = getTimestampedValue(codeToggleKey);
|
||||
const webSearchToggleValue = getTimestampedValue(webSearchToggleKey);
|
||||
const fileSearchToggleValue = getTimestampedValue(fileSearchToggleKey);
|
||||
const artifactsToggleValue = getTimestampedValue(artifactsToggleKey);
|
||||
|
||||
const initialValues: Record<string, any> = {};
|
||||
|
||||
@@ -107,15 +106,37 @@ export default function BadgeRowProvider({
|
||||
}
|
||||
}
|
||||
|
||||
// Always set values for all tools (use defaults if not in localStorage)
|
||||
// If ephemeralAgent is null, create a new object with just our tool values
|
||||
setEphemeralAgent((prev) => ({
|
||||
...(prev || {}),
|
||||
/**
|
||||
* Always set values for all tools (use defaults if not in `localStorage`)
|
||||
* If `ephemeralAgent` is `null`, create a new object with just our tool values
|
||||
*/
|
||||
const finalValues = {
|
||||
[Tools.execute_code]: initialValues[Tools.execute_code] ?? false,
|
||||
[Tools.web_search]: initialValues[Tools.web_search] ?? false,
|
||||
[Tools.file_search]: initialValues[Tools.file_search] ?? false,
|
||||
[AgentCapabilities.artifacts]: initialValues[AgentCapabilities.artifacts] ?? false,
|
||||
};
|
||||
|
||||
setEphemeralAgent((prev) => ({
|
||||
...(prev || {}),
|
||||
...finalValues,
|
||||
}));
|
||||
|
||||
Object.entries(finalValues).forEach(([toolKey, value]) => {
|
||||
if (value !== false) {
|
||||
let storageKey = artifactsToggleKey;
|
||||
if (toolKey === Tools.execute_code) {
|
||||
storageKey = codeToggleKey;
|
||||
} else if (toolKey === Tools.web_search) {
|
||||
storageKey = webSearchToggleKey;
|
||||
} else if (toolKey === Tools.file_search) {
|
||||
storageKey = fileSearchToggleKey;
|
||||
}
|
||||
// Store the value and set timestamp for existing values
|
||||
localStorage.setItem(storageKey, JSON.stringify(value));
|
||||
setTimestamp(storageKey);
|
||||
}
|
||||
});
|
||||
}
|
||||
}, [key, isSubmitting, setEphemeralAgent]);
|
||||
|
||||
@@ -165,20 +186,18 @@ export default function BadgeRowProvider({
|
||||
isAuthenticated: true,
|
||||
});
|
||||
|
||||
const mcpServerNames = useMemo(() => {
|
||||
return (mcpToolDetails ?? []).map((tool) => tool.name);
|
||||
}, [mcpToolDetails]);
|
||||
const mcpServerManager = useMCPServerManager({ conversationId });
|
||||
|
||||
const value: BadgeRowContextType = {
|
||||
webSearch,
|
||||
artifacts,
|
||||
fileSearch,
|
||||
agentsConfig,
|
||||
mcpServerNames,
|
||||
conversationId,
|
||||
codeApiKeyForm,
|
||||
codeInterpreter,
|
||||
searchApiKeyForm,
|
||||
mcpServerManager,
|
||||
};
|
||||
|
||||
return <BadgeRowContext.Provider value={value}>{children}</BadgeRowContext.Provider>;
|
||||
|
||||
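The BadgeRowContext change above swaps direct localStorage.getItem reads for getTimestampedValue/setTimestamp from ~/utils/timestamps, so stale per-conversation toggle values can expire. Those helpers are not shown in this diff; the sketch below is an assumed shape for illustration only, with an arbitrary TTL:

```ts
// Assumed shape of the timestamp helpers referenced above; the suffix and TTL are
// illustrative, not taken from ~/utils/timestamps.
const TIMESTAMP_SUFFIX = '__timestamp';
const MAX_AGE_MS = 1000 * 60 * 60 * 24; // 24h (assumption)

export function setTimestamp(key: string): void {
  localStorage.setItem(`${key}${TIMESTAMP_SUFFIX}`, String(Date.now()));
}

export function getTimestampedValue(key: string): string | null {
  const stamp = Number(localStorage.getItem(`${key}${TIMESTAMP_SUFFIX}`));
  if (!stamp || Date.now() - stamp > MAX_AGE_MS) {
    return null; // treat stale or unstamped toggles as unset
  }
  return localStorage.getItem(key);
}
```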
@@ -1,10 +1,15 @@
import { createContext, useContext } from 'react';

type MessageContext = {
  messageId: string;
  nextType?: string;
  partIndex?: number;
  isExpanded: boolean;
  conversationId?: string | null;
  /** Submission state for cursor display - only true for latest message when submitting */
  isSubmitting?: boolean;
  /** Whether this is the latest message in the conversation */
  isLatestMessage?: boolean;
};

export const MessageContext = createContext<MessageContext>({} as MessageContext);

client/src/Providers/MessagesViewContext.tsx (new file, 150 lines)
@@ -0,0 +1,150 @@
|
||||
import React, { createContext, useContext, useMemo } from 'react';
|
||||
import { useAddedChatContext } from './AddedChatContext';
|
||||
import { useChatContext } from './ChatContext';
|
||||
|
||||
interface MessagesViewContextValue {
|
||||
/** Core conversation data */
|
||||
conversation: ReturnType<typeof useChatContext>['conversation'];
|
||||
conversationId: string | null | undefined;
|
||||
|
||||
/** Submission and control states */
|
||||
isSubmitting: ReturnType<typeof useChatContext>['isSubmitting'];
|
||||
isSubmittingFamily: boolean;
|
||||
abortScroll: ReturnType<typeof useChatContext>['abortScroll'];
|
||||
setAbortScroll: ReturnType<typeof useChatContext>['setAbortScroll'];
|
||||
|
||||
/** Message operations */
|
||||
ask: ReturnType<typeof useChatContext>['ask'];
|
||||
regenerate: ReturnType<typeof useChatContext>['regenerate'];
|
||||
handleContinue: ReturnType<typeof useChatContext>['handleContinue'];
|
||||
|
||||
/** Message state management */
|
||||
index: ReturnType<typeof useChatContext>['index'];
|
||||
latestMessage: ReturnType<typeof useChatContext>['latestMessage'];
|
||||
setLatestMessage: ReturnType<typeof useChatContext>['setLatestMessage'];
|
||||
getMessages: ReturnType<typeof useChatContext>['getMessages'];
|
||||
setMessages: ReturnType<typeof useChatContext>['setMessages'];
|
||||
}
|
||||
|
||||
const MessagesViewContext = createContext<MessagesViewContextValue | undefined>(undefined);
|
||||
|
||||
export function MessagesViewProvider({ children }: { children: React.ReactNode }) {
|
||||
const chatContext = useChatContext();
|
||||
const addedChatContext = useAddedChatContext();
|
||||
|
||||
const {
|
||||
ask,
|
||||
index,
|
||||
regenerate,
|
||||
isSubmitting: isSubmittingRoot,
|
||||
conversation,
|
||||
latestMessage,
|
||||
setAbortScroll,
|
||||
handleContinue,
|
||||
setLatestMessage,
|
||||
abortScroll,
|
||||
getMessages,
|
||||
setMessages,
|
||||
} = chatContext;
|
||||
|
||||
const { isSubmitting: isSubmittingAdditional } = addedChatContext;
|
||||
|
||||
/** Memoize conversation-related values */
|
||||
const conversationValues = useMemo(
|
||||
() => ({
|
||||
conversation,
|
||||
conversationId: conversation?.conversationId,
|
||||
}),
|
||||
[conversation],
|
||||
);
|
||||
|
||||
/** Memoize submission states */
|
||||
const submissionStates = useMemo(
|
||||
() => ({
|
||||
isSubmitting: isSubmittingRoot,
|
||||
isSubmittingFamily: isSubmittingRoot || isSubmittingAdditional,
|
||||
abortScroll,
|
||||
setAbortScroll,
|
||||
}),
|
||||
[isSubmittingRoot, isSubmittingAdditional, abortScroll, setAbortScroll],
|
||||
);
|
||||
|
||||
/** Memoize message operations (these are typically stable references) */
|
||||
const messageOperations = useMemo(
|
||||
() => ({
|
||||
ask,
|
||||
regenerate,
|
||||
getMessages,
|
||||
setMessages,
|
||||
handleContinue,
|
||||
}),
|
||||
[ask, regenerate, handleContinue, getMessages, setMessages],
|
||||
);
|
||||
|
||||
/** Memoize message state values */
|
||||
const messageState = useMemo(
|
||||
() => ({
|
||||
index,
|
||||
latestMessage,
|
||||
setLatestMessage,
|
||||
}),
|
||||
[index, latestMessage, setLatestMessage],
|
||||
);
|
||||
|
||||
/** Combine all values into final context value */
|
||||
const contextValue = useMemo<MessagesViewContextValue>(
|
||||
() => ({
|
||||
...conversationValues,
|
||||
...submissionStates,
|
||||
...messageOperations,
|
||||
...messageState,
|
||||
}),
|
||||
[conversationValues, submissionStates, messageOperations, messageState],
|
||||
);
|
||||
|
||||
return (
|
||||
<MessagesViewContext.Provider value={contextValue}>{children}</MessagesViewContext.Provider>
|
||||
);
|
||||
}
|
||||
|
||||
export function useMessagesViewContext() {
|
||||
const context = useContext(MessagesViewContext);
|
||||
if (!context) {
|
||||
throw new Error('useMessagesViewContext must be used within MessagesViewProvider');
|
||||
}
|
||||
return context;
|
||||
}
|
||||
|
||||
/** Hook for components that only need conversation data */
|
||||
export function useMessagesConversation() {
|
||||
const { conversation, conversationId } = useMessagesViewContext();
|
||||
return useMemo(() => ({ conversation, conversationId }), [conversation, conversationId]);
|
||||
}
|
||||
|
||||
/** Hook for components that only need submission states */
|
||||
export function useMessagesSubmission() {
|
||||
const { isSubmitting, isSubmittingFamily, abortScroll, setAbortScroll } =
|
||||
useMessagesViewContext();
|
||||
return useMemo(
|
||||
() => ({ isSubmitting, isSubmittingFamily, abortScroll, setAbortScroll }),
|
||||
[isSubmitting, isSubmittingFamily, abortScroll, setAbortScroll],
|
||||
);
|
||||
}
|
||||
|
||||
/** Hook for components that only need message operations */
|
||||
export function useMessagesOperations() {
|
||||
const { ask, regenerate, handleContinue, getMessages, setMessages } = useMessagesViewContext();
|
||||
return useMemo(
|
||||
() => ({ ask, regenerate, handleContinue, getMessages, setMessages }),
|
||||
[ask, regenerate, handleContinue, getMessages, setMessages],
|
||||
);
|
||||
}
|
||||
|
||||
/** Hook for components that only need message state */
|
||||
export function useMessagesState() {
|
||||
const { index, latestMessage, setLatestMessage } = useMessagesViewContext();
|
||||
return useMemo(
|
||||
() => ({ index, latestMessage, setLatestMessage }),
|
||||
[index, latestMessage, setLatestMessage],
|
||||
);
|
||||
}
|
||||
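The new MessagesViewProvider exposes narrow selector hooks (useMessagesConversation, useMessagesSubmission, useMessagesOperations, useMessagesState) so components subscribe only to the slice they need. An illustrative consumer; the component and the rendered field are assumptions about the message shape, not code from the repository:

```tsx
import React from 'react';
import { useMessagesState, useMessagesSubmission } from '~/Providers';

// Re-renders only when the selected slices change, not on every chat-context update.
export default function LatestMessagePreview() {
  const { latestMessage } = useMessagesState();
  const { isSubmitting } = useMessagesSubmission();

  if (!latestMessage) {
    return null;
  }

  return <div aria-busy={isSubmitting}>{latestMessage.text ?? ''}</div>;
}
```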
@@ -26,4 +26,5 @@ export * from './SidePanelContext';
export * from './MCPPanelContext';
export * from './ArtifactsContext';
export * from './PromptGroupsContext';
export * from './MessagesViewContext';
export { default as BadgeRowProvider } from './BadgeRowContext';

@@ -239,6 +239,7 @@ export type AgentPanelContextType = {
  setActivePanel: React.Dispatch<React.SetStateAction<Panel>>;
  setCurrentAgentId: React.Dispatch<React.SetStateAction<string | undefined>>;
  agent_id?: string;
  startupConfig?: t.TStartupConfig | null;
  agentsConfig?: t.TAgentsEndpoint | null;
  endpointsConfig?: t.TEndpointsConfig | null;
  /** Pre-computed MCP server information indexed by server key */
@@ -641,10 +642,3 @@ declare global {
    google_tag_manager?: unknown;
  }
}

export type UIResource = {
  uri: string;
  mimeType: string;
  text: string;
  [key: string]: unknown;
};

@@ -13,7 +13,7 @@ interface AgentGridProps {
|
||||
category: string; // Currently selected category
|
||||
searchQuery: string; // Current search query
|
||||
onSelectAgent: (agent: t.Agent) => void; // Callback when agent is selected
|
||||
scrollElement?: HTMLElement | null; // Parent scroll container for infinite scroll
|
||||
scrollElementRef?: React.RefObject<HTMLElement>; // Parent scroll container ref for infinite scroll
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -23,7 +23,7 @@ const AgentGrid: React.FC<AgentGridProps> = ({
|
||||
category,
|
||||
searchQuery,
|
||||
onSelectAgent,
|
||||
scrollElement,
|
||||
scrollElementRef,
|
||||
}) => {
|
||||
const localize = useLocalize();
|
||||
|
||||
@@ -87,7 +87,7 @@ const AgentGrid: React.FC<AgentGridProps> = ({
|
||||
// Set up infinite scroll
|
||||
const { setScrollElement } = useInfiniteScroll({
|
||||
hasNextPage,
|
||||
isFetchingNextPage,
|
||||
isLoading: isFetching || isFetchingNextPage,
|
||||
fetchNextPage: () => {
|
||||
if (hasNextPage && !isFetching) {
|
||||
fetchNextPage();
|
||||
@@ -99,10 +99,11 @@ const AgentGrid: React.FC<AgentGridProps> = ({
|
||||
|
||||
// Connect the scroll element when it's provided
|
||||
useEffect(() => {
|
||||
const scrollElement = scrollElementRef?.current;
|
||||
if (scrollElement) {
|
||||
setScrollElement(scrollElement);
|
||||
}
|
||||
}, [scrollElement, setScrollElement]);
|
||||
}, [scrollElementRef, setScrollElement]);
|
||||
|
||||
/**
|
||||
* Get category display name from API data or use fallback
|
||||
|
||||
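The AgentGrid change above replaces the raw scrollElement prop with a scrollElementRef and connects it to the infinite-scroll hook inside a useEffect. A simplified sketch of that pattern; the useInfiniteScroll option names follow the diff, while the import path and the rest of the component are assumptions:

```tsx
import React, { useEffect } from 'react';
import { useInfiniteScroll } from '~/hooks'; // import path is an assumption

// The parent owns the scroll container and passes a ref down; the grid connects it
// to the infinite-scroll hook once the element exists.
function Grid({ scrollElementRef }: { scrollElementRef: React.RefObject<HTMLElement> }) {
  const { setScrollElement } = useInfiniteScroll({
    hasNextPage: true,
    isFetchingNextPage: false,
    isLoading: false,
    fetchNextPage: () => {
      /* request the next page here */
    },
  });

  useEffect(() => {
    if (scrollElementRef.current) {
      setScrollElement(scrollElementRef.current);
    }
  }, [scrollElementRef, setScrollElement]);

  return <div role="grid">{/* cells */}</div>;
}

export default Grid;
```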
@@ -11,7 +11,6 @@ import { useDocumentTitle, useHasAccess, useLocalize, TranslationKeys } from '~/
|
||||
import { useGetEndpointsQuery, useGetAgentCategoriesQuery } from '~/data-provider';
|
||||
import MarketplaceAdminSettings from './MarketplaceAdminSettings';
|
||||
import { SidePanelProvider, useChatContext } from '~/Providers';
|
||||
import { MarketplaceProvider } from './MarketplaceContext';
|
||||
import { SidePanelGroup } from '~/components/SidePanel';
|
||||
import { OpenSidebar } from '~/components/Chat/Menus';
|
||||
import CategoryTabs from './CategoryTabs';
|
||||
@@ -198,21 +197,21 @@ const AgentMarketplace: React.FC<AgentMarketplaceProps> = ({ className = '' }) =
|
||||
*/
|
||||
const handleSearch = (query: string) => {
|
||||
const newParams = new URLSearchParams(searchParams);
|
||||
const currentCategory = displayCategory;
|
||||
|
||||
if (query.trim()) {
|
||||
newParams.set('q', query.trim());
|
||||
// Switch to "all" category when starting a new search
|
||||
navigate(`/agents/all?${newParams.toString()}`);
|
||||
} else {
|
||||
newParams.delete('q');
|
||||
// Preserve current category when clearing search
|
||||
const currentCategory = displayCategory;
|
||||
if (currentCategory === 'promoted') {
|
||||
navigate(`/agents${newParams.toString() ? `?${newParams.toString()}` : ''}`);
|
||||
} else {
|
||||
navigate(
|
||||
`/agents/${currentCategory}${newParams.toString() ? `?${newParams.toString()}` : ''}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Always preserve current category when searching or clearing search
|
||||
if (currentCategory === 'promoted') {
|
||||
navigate(`/agents${newParams.toString() ? `?${newParams.toString()}` : ''}`);
|
||||
} else {
|
||||
navigate(
|
||||
`/agents/${currentCategory}${newParams.toString() ? `?${newParams.toString()}` : ''}`,
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -272,100 +271,176 @@ const AgentMarketplace: React.FC<AgentMarketplaceProps> = ({ className = '' }) =
|
||||
}
|
||||
return (
|
||||
<div className={`relative flex w-full grow overflow-hidden bg-presentation ${className}`}>
|
||||
<MarketplaceProvider>
|
||||
<SidePanelProvider>
|
||||
<SidePanelGroup
|
||||
defaultLayout={defaultLayout}
|
||||
fullPanelCollapse={fullCollapse}
|
||||
defaultCollapsed={defaultCollapsed}
|
||||
>
|
||||
<main className="flex h-full flex-col overflow-hidden" role="main">
|
||||
{/* Scrollable container */}
|
||||
<div
|
||||
ref={scrollContainerRef}
|
||||
className="scrollbar-gutter-stable relative flex h-full flex-col overflow-y-auto overflow-x-hidden"
|
||||
>
|
||||
{/* Simplified header for agents marketplace - only show nav controls when needed */}
|
||||
{!isSmallScreen && (
|
||||
<div className="sticky top-0 z-20 flex items-center justify-between bg-surface-secondary p-2 font-semibold text-text-primary md:h-14">
|
||||
<div className="mx-1 flex items-center gap-2">
|
||||
{!navVisible ? (
|
||||
<>
|
||||
<OpenSidebar setNavVisible={setNavVisible} />
|
||||
<TooltipAnchor
|
||||
description={localize('com_ui_new_chat')}
|
||||
render={
|
||||
<Button
|
||||
size="icon"
|
||||
variant="outline"
|
||||
data-testid="agents-new-chat-button"
|
||||
aria-label={localize('com_ui_new_chat')}
|
||||
className="rounded-xl border border-border-light bg-surface-secondary p-2 hover:bg-surface-hover max-md:hidden"
|
||||
onClick={handleNewChat}
|
||||
>
|
||||
<NewChatIcon />
|
||||
</Button>
|
||||
}
|
||||
/>
|
||||
</>
|
||||
) : (
|
||||
// Invisible placeholder to maintain height
|
||||
<div className="h-10 w-10" />
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
{/* Hero Section - scrolls away */}
|
||||
{!isSmallScreen && (
|
||||
<div className="container mx-auto max-w-4xl">
|
||||
<div className={cn('mb-8 text-center', 'mt-12')}>
|
||||
<h1 className="mb-3 text-3xl font-bold tracking-tight text-text-primary md:text-5xl">
|
||||
{localize('com_agents_marketplace')}
|
||||
</h1>
|
||||
<p className="mx-auto mb-6 max-w-2xl text-lg text-text-secondary">
|
||||
{localize('com_agents_marketplace_subtitle')}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
{/* Sticky wrapper for search bar and categories */}
|
||||
<div
|
||||
className={cn(
|
||||
'sticky z-10 bg-presentation pb-4',
|
||||
isSmallScreen ? 'top-0' : 'top-14',
|
||||
)}
|
||||
>
|
||||
<div className="container mx-auto max-w-4xl px-4">
|
||||
{/* Search bar */}
|
||||
<div className="mx-auto flex max-w-2xl gap-2 pb-6">
|
||||
<SearchBar value={searchQuery} onSearch={handleSearch} />
|
||||
{/* TODO: Remove this once we have a better way to handle admin settings */}
|
||||
{/* Admin Settings */}
|
||||
<MarketplaceAdminSettings />
|
||||
</div>
|
||||
|
||||
{/* Category tabs */}
|
||||
<CategoryTabs
|
||||
categories={categoriesQuery.data || []}
|
||||
activeTab={displayCategory}
|
||||
isLoading={categoriesQuery.isLoading}
|
||||
onChange={handleTabChange}
|
||||
/>
|
||||
<SidePanelProvider>
|
||||
<SidePanelGroup
|
||||
defaultLayout={defaultLayout}
|
||||
fullPanelCollapse={fullCollapse}
|
||||
defaultCollapsed={defaultCollapsed}
|
||||
>
|
||||
<main className="flex h-full flex-col overflow-hidden" role="main">
|
||||
{/* Scrollable container */}
|
||||
<div
|
||||
ref={scrollContainerRef}
|
||||
className="scrollbar-gutter-stable relative flex h-full flex-col overflow-y-auto overflow-x-hidden"
|
||||
>
|
||||
{/* Simplified header for agents marketplace - only show nav controls when needed */}
|
||||
{!isSmallScreen && (
|
||||
<div className="sticky top-0 z-20 flex items-center justify-between bg-surface-secondary p-2 font-semibold text-text-primary md:h-14">
|
||||
<div className="mx-1 flex items-center gap-2">
|
||||
{!navVisible ? (
|
||||
<>
|
||||
<OpenSidebar setNavVisible={setNavVisible} />
|
||||
<TooltipAnchor
|
||||
description={localize('com_ui_new_chat')}
|
||||
render={
|
||||
<Button
|
||||
size="icon"
|
||||
variant="outline"
|
||||
data-testid="agents-new-chat-button"
|
||||
aria-label={localize('com_ui_new_chat')}
|
||||
className="rounded-xl border border-border-light bg-surface-secondary p-2 hover:bg-surface-hover max-md:hidden"
|
||||
onClick={handleNewChat}
|
||||
>
|
||||
<NewChatIcon />
|
||||
</Button>
|
||||
}
|
||||
/>
|
||||
</>
|
||||
) : (
|
||||
// Invisible placeholder to maintain height
|
||||
<div className="h-10 w-10" />
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
{/* Scrollable content area */}
|
||||
<div className="container mx-auto max-w-4xl px-4 pb-8">
|
||||
{/* Two-pane animated container wrapping category header + grid */}
|
||||
<div className="relative overflow-hidden">
|
||||
{/* Current content pane */}
|
||||
)}
|
||||
{/* Hero Section - scrolls away */}
|
||||
{!isSmallScreen && (
|
||||
<div className="container mx-auto max-w-4xl">
|
||||
<div className={cn('mb-8 text-center', 'mt-12')}>
|
||||
<h1 className="mb-3 text-3xl font-bold tracking-tight text-text-primary md:text-5xl">
|
||||
{localize('com_agents_marketplace')}
|
||||
</h1>
|
||||
<p className="mx-auto mb-6 max-w-2xl text-lg text-text-secondary">
|
||||
{localize('com_agents_marketplace_subtitle')}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
{/* Sticky wrapper for search bar and categories */}
|
||||
<div
|
||||
className={cn(
|
||||
'sticky z-10 bg-presentation pb-4',
|
||||
isSmallScreen ? 'top-0' : 'top-14',
|
||||
)}
|
||||
>
|
||||
<div className="container mx-auto max-w-4xl px-4">
|
||||
{/* Search bar */}
|
||||
<div className="mx-auto flex max-w-2xl gap-2 pb-6">
|
||||
<SearchBar value={searchQuery} onSearch={handleSearch} />
|
||||
{/* TODO: Remove this once we have a better way to handle admin settings */}
|
||||
{/* Admin Settings */}
|
||||
<MarketplaceAdminSettings />
|
||||
</div>
|
||||
|
||||
{/* Category tabs */}
|
||||
<CategoryTabs
|
||||
categories={categoriesQuery.data || []}
|
||||
activeTab={displayCategory}
|
||||
isLoading={categoriesQuery.isLoading}
|
||||
onChange={handleTabChange}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
{/* Scrollable content area */}
|
||||
<div className="container mx-auto max-w-4xl px-4 pb-8">
|
||||
{/* Two-pane animated container wrapping category header + grid */}
|
||||
<div className="relative overflow-hidden">
|
||||
{/* Current content pane */}
|
||||
<div
|
||||
className={cn(
|
||||
isTransitioning &&
|
||||
(animationDirection === 'right'
|
||||
? 'motion-safe:animate-slide-out-left'
|
||||
: 'motion-safe:animate-slide-out-right'),
|
||||
)}
|
||||
key={`pane-current-${displayCategory}`}
|
||||
>
|
||||
{/* Category header - only show when not searching */}
|
||||
{!searchQuery && (
|
||||
<div className="mb-6 mt-6">
|
||||
{(() => {
|
||||
// Get category data for display
|
||||
const getCategoryData = () => {
|
||||
if (displayCategory === 'promoted') {
|
||||
return {
|
||||
name: localize('com_agents_top_picks'),
|
||||
description: localize('com_agents_recommended'),
|
||||
};
|
||||
}
|
||||
if (displayCategory === 'all') {
|
||||
return {
|
||||
name: localize('com_agents_all'),
|
||||
description: localize('com_agents_all_description'),
|
||||
};
|
||||
}
|
||||
|
||||
// Find the category in the API data
|
||||
const categoryData = categoriesQuery.data?.find(
|
||||
(cat) => cat.value === displayCategory,
|
||||
);
|
||||
if (categoryData) {
|
||||
return {
|
||||
name: categoryData.label?.startsWith('com_')
|
||||
? localize(categoryData.label as TranslationKeys)
|
||||
: categoryData.label,
|
||||
description: categoryData.description?.startsWith('com_')
|
||||
? localize(categoryData.description as TranslationKeys)
|
||||
: categoryData.description || '',
|
||||
};
|
||||
}
|
||||
|
||||
// Fallback for unknown categories
|
||||
return {
|
||||
name:
|
||||
displayCategory.charAt(0).toUpperCase() + displayCategory.slice(1),
|
||||
description: '',
|
||||
};
|
||||
};
|
||||
|
||||
const { name, description } = getCategoryData();
|
||||
|
||||
return (
|
||||
<div className="text-left">
|
||||
<h2 className="text-2xl font-bold text-text-primary">{name}</h2>
|
||||
{description && (
|
||||
<p className="mt-2 text-text-secondary">{description}</p>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
})()}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Agent grid */}
|
||||
<AgentGrid
|
||||
key={`grid-${displayCategory}`}
|
||||
category={displayCategory}
|
||||
searchQuery={searchQuery}
|
||||
onSelectAgent={handleAgentSelect}
|
||||
scrollElementRef={scrollContainerRef}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Next content pane, only during transition */}
|
||||
{isTransitioning && nextCategory && (
|
||||
<div
|
||||
className={cn(
|
||||
isTransitioning &&
|
||||
(animationDirection === 'right'
|
||||
? 'motion-safe:animate-slide-out-left'
|
||||
: 'motion-safe:animate-slide-out-right'),
|
||||
'absolute inset-0',
|
||||
animationDirection === 'right'
|
||||
? 'motion-safe:animate-slide-in-right'
|
||||
: 'motion-safe:animate-slide-in-left',
|
||||
)}
|
||||
key={`pane-current-${displayCategory}`}
|
||||
key={`pane-next-${nextCategory}-${animationDirection}`}
|
||||
>
|
||||
{/* Category header - only show when not searching */}
|
||||
{!searchQuery && (
|
||||
@@ -373,13 +448,13 @@ const AgentMarketplace: React.FC<AgentMarketplaceProps> = ({ className = '' }) =
|
||||
{(() => {
|
||||
// Get category data for display
|
||||
const getCategoryData = () => {
|
||||
if (displayCategory === 'promoted') {
|
||||
if (nextCategory === 'promoted') {
|
||||
return {
|
||||
name: localize('com_agents_top_picks'),
|
||||
description: localize('com_agents_recommended'),
|
||||
};
|
||||
}
|
||||
if (displayCategory === 'all') {
|
||||
if (nextCategory === 'all') {
|
||||
return {
|
||||
name: localize('com_agents_all'),
|
||||
description: localize('com_agents_all_description'),
|
||||
@@ -388,7 +463,7 @@ const AgentMarketplace: React.FC<AgentMarketplaceProps> = ({ className = '' }) =
|
||||
|
||||
// Find the category in the API data
|
||||
const categoryData = categoriesQuery.data?.find(
|
||||
(cat) => cat.value === displayCategory,
|
||||
(cat) => cat.value === nextCategory,
|
||||
);
|
||||
if (categoryData) {
|
||||
return {
|
||||
@@ -396,7 +471,9 @@ const AgentMarketplace: React.FC<AgentMarketplaceProps> = ({ className = '' }) =
|
||||
? localize(categoryData.label as TranslationKeys)
|
||||
: categoryData.label,
|
||||
description: categoryData.description?.startsWith('com_')
|
||||
? localize(categoryData.description as TranslationKeys)
|
||||
? localize(
|
||||
categoryData.description as Parameters<typeof localize>[0],
|
||||
)
|
||||
: categoryData.description || '',
|
||||
};
|
||||
}
|
||||
@@ -404,8 +481,8 @@ const AgentMarketplace: React.FC<AgentMarketplaceProps> = ({ className = '' }) =
|
||||
// Fallback for unknown categories
|
||||
return {
|
||||
name:
|
||||
displayCategory.charAt(0).toUpperCase() +
|
||||
displayCategory.slice(1),
|
||||
(nextCategory || '').charAt(0).toUpperCase() +
|
||||
(nextCategory || '').slice(1),
|
||||
description: '',
|
||||
};
|
||||
};
|
||||
@@ -426,113 +503,30 @@ const AgentMarketplace: React.FC<AgentMarketplaceProps> = ({ className = '' }) =
|
||||
|
||||
{/* Agent grid */}
|
||||
<AgentGrid
|
||||
key={`grid-${displayCategory}`}
|
||||
category={displayCategory}
|
||||
key={`grid-${nextCategory}`}
|
||||
category={nextCategory}
|
||||
searchQuery={searchQuery}
|
||||
onSelectAgent={handleAgentSelect}
|
||||
scrollElement={scrollContainerRef.current}
|
||||
scrollElementRef={scrollContainerRef}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Next content pane, only during transition */}
|
||||
{isTransitioning && nextCategory && (
|
||||
<div
|
||||
className={cn(
|
||||
'absolute inset-0',
|
||||
animationDirection === 'right'
|
||||
? 'motion-safe:animate-slide-in-right'
|
||||
: 'motion-safe:animate-slide-in-left',
|
||||
)}
|
||||
key={`pane-next-${nextCategory}-${animationDirection}`}
|
||||
>
|
||||
{/* Category header - only show when not searching */}
|
||||
{!searchQuery && (
|
||||
<div className="mb-6 mt-6">
|
||||
{(() => {
|
||||
// Get category data for display
|
||||
const getCategoryData = () => {
|
||||
if (nextCategory === 'promoted') {
|
||||
return {
|
||||
name: localize('com_agents_top_picks'),
|
||||
description: localize('com_agents_recommended'),
|
||||
};
|
||||
}
|
||||
if (nextCategory === 'all') {
|
||||
return {
|
||||
name: localize('com_agents_all'),
|
||||
description: localize('com_agents_all_description'),
|
||||
};
|
||||
}
|
||||
|
||||
// Find the category in the API data
|
||||
const categoryData = categoriesQuery.data?.find(
|
||||
(cat) => cat.value === nextCategory,
|
||||
);
|
||||
if (categoryData) {
|
||||
return {
|
||||
name: categoryData.label?.startsWith('com_')
|
||||
? localize(categoryData.label as TranslationKeys)
|
||||
: categoryData.label,
|
||||
description: categoryData.description?.startsWith('com_')
|
||||
? localize(
|
||||
categoryData.description as Parameters<
|
||||
typeof localize
|
||||
>[0],
|
||||
)
|
||||
: categoryData.description || '',
|
||||
};
|
||||
}
|
||||
|
||||
// Fallback for unknown categories
|
||||
return {
|
||||
name:
|
||||
(nextCategory || '').charAt(0).toUpperCase() +
|
||||
(nextCategory || '').slice(1),
|
||||
description: '',
|
||||
};
|
||||
};
|
||||
|
||||
const { name, description } = getCategoryData();
|
||||
|
||||
return (
|
||||
<div className="text-left">
|
||||
<h2 className="text-2xl font-bold text-text-primary">{name}</h2>
|
||||
{description && (
|
||||
<p className="mt-2 text-text-secondary">{description}</p>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
})()}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Agent grid */}
|
||||
<AgentGrid
|
||||
key={`grid-${nextCategory}`}
|
||||
category={nextCategory}
|
||||
searchQuery={searchQuery}
|
||||
onSelectAgent={handleAgentSelect}
|
||||
scrollElement={scrollContainerRef.current}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Note: Using Tailwind keyframes for slide in/out animations */}
|
||||
</div>
|
||||
{/* Note: Using Tailwind keyframes for slide in/out animations */}
|
||||
</div>
|
||||
{/* Agent detail dialog */}
|
||||
{isDetailOpen && selectedAgent && (
|
||||
<AgentDetail
|
||||
agent={selectedAgent}
|
||||
isOpen={isDetailOpen}
|
||||
onClose={handleDetailClose}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</main>
|
||||
</SidePanelGroup>
|
||||
</SidePanelProvider>
|
||||
</MarketplaceProvider>
|
||||
{/* Agent detail dialog */}
|
||||
{isDetailOpen && selectedAgent && (
|
||||
<AgentDetail
|
||||
agent={selectedAgent}
|
||||
isOpen={isDetailOpen}
|
||||
onClose={handleDetailClose}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</main>
|
||||
</SidePanelGroup>
|
||||
</SidePanelProvider>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import React from 'react';
|
||||
import { render, screen, fireEvent } from '@testing-library/react';
|
||||
import { render, screen, fireEvent, waitFor, act } from '@testing-library/react';
|
||||
|
||||
import '@testing-library/jest-dom';
|
||||
import AgentGrid from '../AgentGrid';
|
||||
import type t from 'librechat-data-provider';
|
||||
@@ -81,6 +82,115 @@ import { useMarketplaceAgentsInfiniteQuery } from '~/data-provider/Agents';
|
||||
|
||||
const mockUseMarketplaceAgentsInfiniteQuery = jest.mocked(useMarketplaceAgentsInfiniteQuery);
|
||||
|
||||
// Helper to create mock API response
|
||||
const createMockResponse = (
|
||||
agentIds: string[],
|
||||
hasMore: boolean,
|
||||
afterCursor?: string,
|
||||
): t.AgentListResponse => ({
|
||||
object: 'list',
|
||||
data: agentIds.map(
|
||||
(id) =>
|
||||
({
|
||||
id,
|
||||
name: `Agent ${id}`,
|
||||
description: `Description for ${id}`,
|
||||
created_at: Date.now(),
|
||||
model: 'gpt-4',
|
||||
tools: [],
|
||||
instructions: '',
|
||||
avatar: null,
|
||||
provider: 'openai',
|
||||
model_parameters: {
|
||||
temperature: 0.7,
|
||||
top_p: 1,
|
||||
frequency_penalty: 0,
|
||||
presence_penalty: 0,
|
||||
maxContextTokens: 2000,
|
||||
max_context_tokens: 2000,
|
||||
max_output_tokens: 2000,
|
||||
},
|
||||
}) as t.Agent,
|
||||
),
|
||||
first_id: agentIds[0] || '',
|
||||
last_id: agentIds[agentIds.length - 1] || '',
|
||||
has_more: hasMore,
|
||||
after: afterCursor,
|
||||
});
|
||||
|
||||
// Helper to setup mock viewport
|
||||
const setupViewport = (scrollHeight: number, clientHeight: number) => {
|
||||
const listeners: { [key: string]: EventListener[] } = {};
|
||||
return {
|
||||
scrollHeight,
|
||||
clientHeight,
|
||||
scrollTop: 0,
|
||||
addEventListener: jest.fn((event: string, listener: EventListener) => {
|
||||
if (!listeners[event]) {
|
||||
listeners[event] = [];
|
||||
}
|
||||
listeners[event].push(listener);
|
||||
}),
|
||||
removeEventListener: jest.fn((event: string, listener: EventListener) => {
|
||||
if (listeners[event]) {
|
||||
listeners[event] = listeners[event].filter((l) => l !== listener);
|
||||
}
|
||||
}),
|
||||
dispatchEvent: jest.fn((event: Event) => {
|
||||
const eventListeners = listeners[event.type];
|
||||
if (eventListeners) {
|
||||
eventListeners.forEach((listener) => listener(event));
|
||||
}
|
||||
return true;
|
||||
}),
|
||||
} as unknown as HTMLElement;
|
||||
};
|
||||
|
||||
// Helper to create mock infinite query return value
|
||||
const createMockInfiniteQuery = (
|
||||
pages: t.AgentListResponse[],
|
||||
options?: {
|
||||
isLoading?: boolean;
|
||||
hasNextPage?: boolean;
|
||||
fetchNextPage?: jest.Mock;
|
||||
isFetchingNextPage?: boolean;
|
||||
},
|
||||
) =>
|
||||
({
|
||||
data: {
|
||||
pages,
|
||||
pageParams: pages.map((_, i) => (i === 0 ? undefined : `cursor-${i * 6}`)),
|
||||
},
|
||||
isLoading: options?.isLoading ?? false,
|
||||
error: null,
|
||||
isFetching: false,
|
||||
hasNextPage: options?.hasNextPage ?? pages[pages.length - 1]?.has_more ?? false,
|
||||
isFetchingNextPage: options?.isFetchingNextPage ?? false,
|
||||
fetchNextPage: options?.fetchNextPage ?? jest.fn(),
|
||||
refetch: jest.fn(),
|
||||
// Add missing required properties for UseInfiniteQueryResult
|
||||
isError: false,
|
||||
isLoadingError: false,
|
||||
isRefetchError: false,
|
||||
isSuccess: true,
|
||||
status: 'success' as const,
|
||||
dataUpdatedAt: Date.now(),
|
||||
errorUpdateCount: 0,
|
||||
errorUpdatedAt: 0,
|
||||
failureCount: 0,
|
||||
failureReason: null,
|
||||
fetchStatus: 'idle' as const,
|
||||
isFetched: true,
|
||||
isFetchedAfterMount: true,
|
||||
isInitialLoading: false,
|
||||
isPaused: false,
|
||||
isPlaceholderData: false,
|
||||
isPending: false,
|
||||
isRefetching: false,
|
||||
isStale: false,
|
||||
remove: jest.fn(),
|
||||
}) as any;
|
||||
|
||||
describe('AgentGrid Integration with useGetMarketplaceAgentsQuery', () => {
|
||||
const mockOnSelectAgent = jest.fn();
|
||||
|
||||
@@ -343,6 +453,15 @@ describe('AgentGrid Integration with useGetMarketplaceAgentsQuery', () => {
|
||||
});
|
||||
|
||||
describe('Infinite Scroll Functionality', () => {
|
||||
beforeEach(() => {
|
||||
// Silence console.log in tests
|
||||
jest.spyOn(console, 'log').mockImplementation(() => {});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.restoreAllMocks();
|
||||
});
|
||||
|
||||
it('should show loading indicator when fetching next page', () => {
|
||||
mockUseMarketplaceAgentsInfiniteQuery.mockReturnValue({
|
||||
...defaultMockQueryResult,
|
||||
@@ -396,5 +515,358 @@ describe('AgentGrid Integration with useGetMarketplaceAgentsQuery', () => {
|
||||
|
||||
expect(screen.queryByText("You've reached the end of the results")).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
describe('Auto-fetch to fill viewport', () => {
|
||||
it('should NOT auto-fetch when viewport is filled (5 agents, has_more=false)', async () => {
|
||||
const mockResponse = createMockResponse(['1', '2', '3', '4', '5'], false);
|
||||
const fetchNextPage = jest.fn();
|
||||
|
||||
mockUseMarketplaceAgentsInfiniteQuery.mockReturnValue(
|
||||
createMockInfiniteQuery([mockResponse], { fetchNextPage }),
|
||||
);
|
||||
|
||||
const scrollElement = setupViewport(500, 1000); // Content smaller than viewport
|
||||
const scrollElementRef = { current: scrollElement };
|
||||
const Wrapper = createWrapper();
|
||||
|
||||
render(
|
||||
<Wrapper>
|
||||
<AgentGrid
|
||||
category="all"
|
||||
searchQuery=""
|
||||
onSelectAgent={mockOnSelectAgent}
|
||||
scrollElementRef={scrollElementRef}
|
||||
/>
|
||||
</Wrapper>,
|
||||
);
|
||||
|
||||
// Wait for initial render
|
||||
await waitFor(() => {
|
||||
expect(screen.getAllByRole('gridcell')).toHaveLength(5);
|
||||
});
|
||||
|
||||
// Wait to ensure no auto-fetch happens
|
||||
await act(async () => {
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
});
|
||||
|
||||
// fetchNextPage should NOT be called since has_more is false
|
||||
expect(fetchNextPage).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should auto-fetch when viewport not filled (7 agents, big viewport)', async () => {
|
||||
const firstPage = createMockResponse(['1', '2', '3', '4', '5', '6'], true, 'cursor-6');
|
||||
const secondPage = createMockResponse(['7'], false);
|
||||
let currentPages = [firstPage];
|
||||
const fetchNextPage = jest.fn();
|
||||
|
||||
// Mock that updates pages when fetchNextPage is called
|
||||
mockUseMarketplaceAgentsInfiniteQuery.mockImplementation(() =>
|
||||
createMockInfiniteQuery(currentPages, {
|
||||
fetchNextPage: jest.fn().mockImplementation(() => {
|
||||
fetchNextPage();
|
||||
currentPages = [firstPage, secondPage];
|
||||
return Promise.resolve();
|
||||
}),
|
||||
hasNextPage: true,
|
||||
}),
|
||||
);
|
||||
|
||||
const scrollElement = setupViewport(400, 1200); // Large viewport (content < viewport)
|
||||
const scrollElementRef = { current: scrollElement };
|
||||
const Wrapper = createWrapper();
|
||||
|
||||
const { rerender } = render(
|
||||
<Wrapper>
|
||||
<AgentGrid
|
||||
category="all"
|
||||
searchQuery=""
|
||||
onSelectAgent={mockOnSelectAgent}
|
||||
scrollElementRef={scrollElementRef}
|
||||
/>
|
||||
</Wrapper>,
|
||||
);
|
||||
|
||||
// Wait for initial 6 agents
|
||||
await waitFor(() => {
|
||||
expect(screen.getAllByRole('gridcell')).toHaveLength(6);
|
||||
});
|
||||
|
||||
// Wait for ResizeObserver and auto-fetch to trigger
|
||||
await act(async () => {
|
||||
await new Promise((resolve) => setTimeout(resolve, 150));
|
||||
});
|
||||
|
||||
// Auto-fetch should have been triggered (multiple times due to reliability checks)
|
||||
expect(fetchNextPage).toHaveBeenCalled();
|
||||
expect(fetchNextPage.mock.calls.length).toBeGreaterThanOrEqual(1);

// Update mock data and re-render
currentPages = [firstPage, secondPage];
rerender(
<Wrapper>
<AgentGrid
category="all"
searchQuery=""
onSelectAgent={mockOnSelectAgent}
scrollElementRef={scrollElementRef}
/>
</Wrapper>,
);

// Should now show all 7 agents
await waitFor(() => {
expect(screen.getAllByRole('gridcell')).toHaveLength(7);
});
});

it('should NOT auto-fetch when viewport is filled (7 agents, small viewport)', async () => {
const firstPage = createMockResponse(['1', '2', '3', '4', '5', '6'], true, 'cursor-6');
const fetchNextPage = jest.fn();

mockUseMarketplaceAgentsInfiniteQuery.mockReturnValue(
createMockInfiniteQuery([firstPage], { fetchNextPage, hasNextPage: true }),
);

const scrollElement = setupViewport(1200, 600); // Small viewport, content fills it
const scrollElementRef = { current: scrollElement };
const Wrapper = createWrapper();

render(
<Wrapper>
<AgentGrid
category="all"
searchQuery=""
onSelectAgent={mockOnSelectAgent}
scrollElementRef={scrollElementRef}
/>
</Wrapper>,
);

// Wait for initial 6 agents
await waitFor(() => {
expect(screen.getAllByRole('gridcell')).toHaveLength(6);
});

// Wait to ensure no auto-fetch happens
await act(async () => {
await new Promise((resolve) => setTimeout(resolve, 200));
});

// Should NOT auto-fetch since viewport is filled
expect(fetchNextPage).not.toHaveBeenCalled();
});

it('should auto-fetch once to fill viewport then stop (20 agents)', async () => {
const allPages = [
createMockResponse(['1', '2', '3', '4', '5', '6'], true, 'cursor-6'),
createMockResponse(['7', '8', '9', '10', '11', '12'], true, 'cursor-12'),
createMockResponse(['13', '14', '15', '16', '17', '18'], true, 'cursor-18'),
createMockResponse(['19', '20'], false),
];

let currentPages = [allPages[0]];
let fetchCount = 0;
const fetchNextPage = jest.fn();

mockUseMarketplaceAgentsInfiniteQuery.mockImplementation(() =>
createMockInfiniteQuery(currentPages, {
fetchNextPage: jest.fn().mockImplementation(() => {
fetchCount++;
fetchNextPage();
if (currentPages.length < 2) {
currentPages = allPages.slice(0, 2);
}
return Promise.resolve();
}),
hasNextPage: currentPages.length < 2,
}),
);

const scrollElement = setupViewport(600, 1000); // Viewport fits ~12 agents
const scrollElementRef = { current: scrollElement };
const Wrapper = createWrapper();

const { rerender } = render(
<Wrapper>
<AgentGrid
category="all"
searchQuery=""
onSelectAgent={mockOnSelectAgent}
scrollElementRef={scrollElementRef}
/>
</Wrapper>,
);

// Wait for initial 6 agents
await waitFor(() => {
expect(screen.getAllByRole('gridcell')).toHaveLength(6);
});

// Should auto-fetch to fill viewport
await waitFor(
() => {
expect(fetchNextPage).toHaveBeenCalledTimes(1);
},
{ timeout: 500 },
);

// Simulate viewport being filled after 12 agents
Object.defineProperty(scrollElement, 'scrollHeight', {
value: 1200,
writable: true,
configurable: true,
});

currentPages = allPages.slice(0, 2);
rerender(
<Wrapper>
<AgentGrid
category="all"
searchQuery=""
onSelectAgent={mockOnSelectAgent}
scrollElementRef={scrollElementRef}
/>
</Wrapper>,
);

// Should show 12 agents
await waitFor(() => {
expect(screen.getAllByRole('gridcell')).toHaveLength(12);
});

// Wait to ensure no additional auto-fetch
await act(async () => {
await new Promise((resolve) => setTimeout(resolve, 200));
});

// Should only have fetched once (to fill viewport)
expect(fetchCount).toBe(1);
expect(fetchNextPage).toHaveBeenCalledTimes(1);
});

it('should auto-fetch when viewport resizes to be taller (window resize)', async () => {
const firstPage = createMockResponse(['1', '2', '3', '4', '5', '6'], true, 'cursor-6');
const secondPage = createMockResponse(['7', '8', '9', '10', '11', '12'], true, 'cursor-12');
let currentPages = [firstPage];
const fetchNextPage = jest.fn();
let resizeObserverCallback: ResizeObserverCallback | null = null;

// Mock that updates pages when fetchNextPage is called
mockUseMarketplaceAgentsInfiniteQuery.mockImplementation(() =>
createMockInfiniteQuery(currentPages, {
fetchNextPage: jest.fn().mockImplementation(() => {
fetchNextPage();
if (currentPages.length === 1) {
currentPages = [firstPage, secondPage];
}
return Promise.resolve();
}),
hasNextPage: currentPages.length === 1,
}),
);

// Mock ResizeObserver to capture the callback
const ResizeObserverMock = jest.fn().mockImplementation((callback) => {
resizeObserverCallback = callback;
return {
observe: jest.fn(),
disconnect: jest.fn(),
unobserve: jest.fn(),
};
});
global.ResizeObserver = ResizeObserverMock as any;

// Start with a small viewport that fits the content
const scrollElement = setupViewport(800, 600);
const scrollElementRef = { current: scrollElement };
const Wrapper = createWrapper();

const { rerender } = render(
<Wrapper>
<AgentGrid
category="all"
searchQuery=""
onSelectAgent={mockOnSelectAgent}
scrollElementRef={scrollElementRef}
/>
</Wrapper>,
);

// Wait for initial 6 agents
await waitFor(() => {
expect(screen.getAllByRole('gridcell')).toHaveLength(6);
});

// Verify ResizeObserver was set up
expect(ResizeObserverMock).toHaveBeenCalled();
expect(resizeObserverCallback).not.toBeNull();

// Initially no fetch should happen as viewport is filled
await act(async () => {
await new Promise((resolve) => setTimeout(resolve, 100));
});
expect(fetchNextPage).not.toHaveBeenCalled();

// Simulate window resize - make viewport taller
Object.defineProperty(scrollElement, 'clientHeight', {
value: 1200, // Now taller than content
writable: true,
configurable: true,
});

// Trigger ResizeObserver callback to simulate resize detection
act(() => {
if (resizeObserverCallback) {
resizeObserverCallback(
[
{
target: scrollElement,
contentRect: {
x: 0,
y: 0,
width: 800,
height: 1200,
top: 0,
right: 800,
bottom: 1200,
left: 0,
} as DOMRectReadOnly,
borderBoxSize: [],
contentBoxSize: [],
devicePixelContentBoxSize: [],
} as ResizeObserverEntry,
],
{} as ResizeObserver,
);
}
});

// Should trigger auto-fetch due to viewport now being larger than content
await waitFor(
() => {
expect(fetchNextPage).toHaveBeenCalledTimes(1);
},
{ timeout: 500 },
);

// Update the component with new data
rerender(
<Wrapper>
<AgentGrid
category="all"
searchQuery=""
onSelectAgent={mockOnSelectAgent}
scrollElementRef={scrollElementRef}
/>
</Wrapper>,
);

// Should now show 12 agents after fetching
await waitFor(() => {
expect(screen.getAllByRole('gridcell')).toHaveLength(12);
});
});
});
});
});

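The tests above pin down a viewport-fill contract: after the first page renders, more pages are requested only while the scroll container's content is shorter than its visible height, and a ResizeObserver re-runs that check when the container grows. A minimal sketch of the kind of effect these tests exercise (the hook and argument names here are illustrative, not the actual AgentGrid implementation):

import { useEffect, type RefObject } from 'react';

interface FillViewportArgs {
  scrollRef: RefObject<HTMLElement>;
  hasNextPage: boolean;
  isFetchingNextPage: boolean;
  fetchNextPage: () => Promise<unknown>;
}

/** Keep fetching pages only while the content does not yet fill the scroll container. */
function useFillViewport({ scrollRef, hasNextPage, isFetchingNextPage, fetchNextPage }: FillViewportArgs) {
  useEffect(() => {
    const el = scrollRef.current;
    if (!el) {
      return;
    }
    const maybeFetch = () => {
      // scrollHeight <= clientHeight means the viewport is not filled yet
      if (hasNextPage && !isFetchingNextPage && el.scrollHeight <= el.clientHeight) {
        void fetchNextPage();
      }
    };
    maybeFetch();
    // Re-check when the container is resized (e.g., the window becomes taller)
    const observer = new ResizeObserver(maybeFetch);
    observer.observe(el);
    return () => observer.disconnect();
  }, [scrollRef, hasNextPage, isFetchingNextPage, fetchNextPage]);
}
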
@@ -194,7 +194,7 @@ describe('Virtual Scrolling Performance', () => {

// Performance check: rendering should be fast
const renderTime = endTime - startTime;
expect(renderTime).toBeLessThan(650);
expect(renderTime).toBeLessThan(720);

console.log(`Rendered 1000 agents in ${renderTime.toFixed(2)}ms`);
console.log(`Only ${renderedCards.length} DOM nodes created for 1000 agents`);

@@ -368,7 +368,7 @@ function BadgeRow({
<CodeInterpreter />
<FileSearch />
<Artifacts />
<MCPSelect conversationId={conversationId} />
<MCPSelect />
</>
)}
{ghostBadge && (

@@ -39,6 +39,7 @@ function AttachFileChat({
<AttachFileMenu
disabled={disableInputs}
conversationId={conversationId}
agentId={conversation?.agent_id}
endpointFileConfig={endpointFileConfig}
/>
);

@@ -11,7 +11,13 @@ import {
SharePointIcon,
} from '@librechat/client';
import type { EndpointFileConfig } from 'librechat-data-provider';
import { useLocalize, useGetAgentsConfig, useFileHandling, useAgentCapabilities } from '~/hooks';
import {
useAgentToolPermissions,
useAgentCapabilities,
useGetAgentsConfig,
useFileHandling,
useLocalize,
} from '~/hooks';
import useSharePointFileHandling from '~/hooks/Files/useSharePointFileHandling';
import { SharePointPickerDialog } from '~/components/SharePoint';
import { useGetStartupConfig } from '~/data-provider';
@@ -21,11 +27,17 @@ import { cn } from '~/utils';

interface AttachFileMenuProps {
conversationId: string;
agentId?: string | null;
disabled?: boolean | null;
endpointFileConfig?: EndpointFileConfig;
}

const AttachFileMenu = ({ disabled, conversationId, endpointFileConfig }: AttachFileMenuProps) => {
const AttachFileMenu = ({
agentId,
disabled,
conversationId,
endpointFileConfig,
}: AttachFileMenuProps) => {
const localize = useLocalize();
const isUploadDisabled = disabled ?? false;
const inputRef = useRef<HTMLInputElement>(null);
@@ -52,6 +64,8 @@ const AttachFileMenu = ({ disabled, conversationId, endpointFileConfig }: Attach
* */
const capabilities = useAgentCapabilities(agentsConfig?.capabilities ?? defaultAgentCapabilities);

const { fileSearchAllowedByAgent, codeAllowedByAgent } = useAgentToolPermissions(agentId);

const handleUploadClick = (isImage?: boolean) => {
if (!inputRef.current) {
return;
@@ -86,7 +100,7 @@ const AttachFileMenu = ({ disabled, conversationId, endpointFileConfig }: Attach
});
}

if (capabilities.fileSearchEnabled) {
if (capabilities.fileSearchEnabled && fileSearchAllowedByAgent) {
items.push({
label: localize('com_ui_upload_file_search'),
onClick: () => {
@@ -101,7 +115,7 @@ const AttachFileMenu = ({ disabled, conversationId, endpointFileConfig }: Attach
});
}

if (capabilities.codeEnabled) {
if (capabilities.codeEnabled && codeAllowedByAgent) {
items.push({
label: localize('com_ui_upload_code_files'),
onClick: () => {
@@ -142,6 +156,8 @@ const AttachFileMenu = ({ disabled, conversationId, endpointFileConfig }: Attach
setToolResource,
setEphemeralAgent,
sharePointEnabled,
codeAllowedByAgent,
fileSearchAllowedByAgent,
setIsSharePointDialogOpen,
]);

@@ -2,7 +2,13 @@ import React, { useMemo } from 'react';
import { OGDialog, OGDialogTemplate } from '@librechat/client';
import { ImageUpIcon, FileSearch, TerminalSquareIcon, FileType2Icon } from 'lucide-react';
import { EToolResources, defaultAgentCapabilities } from 'librechat-data-provider';
import { useLocalize, useGetAgentsConfig, useAgentCapabilities } from '~/hooks';
import {
useAgentToolPermissions,
useAgentCapabilities,
useGetAgentsConfig,
useLocalize,
} from '~/hooks';
import { useChatContext } from '~/Providers';

interface DragDropModalProps {
onOptionSelect: (option: EToolResources | undefined) => void;
@@ -26,6 +32,11 @@ const DragDropModal = ({ onOptionSelect, setShowModal, files, isVisible }: DragD
* Use definition for agents endpoint for ephemeral agents
* */
const capabilities = useAgentCapabilities(agentsConfig?.capabilities ?? defaultAgentCapabilities);
const { conversation } = useChatContext();
const { fileSearchAllowedByAgent, codeAllowedByAgent } = useAgentToolPermissions(
conversation?.agent_id,
);

const options = useMemo(() => {
const _options: FileOption[] = [
{
@@ -35,14 +46,14 @@ const DragDropModal = ({ onOptionSelect, setShowModal, files, isVisible }: DragD
condition: files.every((file) => file.type?.startsWith('image/')),
},
];
if (capabilities.fileSearchEnabled) {
if (capabilities.fileSearchEnabled && fileSearchAllowedByAgent) {
_options.push({
label: localize('com_ui_upload_file_search'),
value: EToolResources.file_search,
icon: <FileSearch className="icon-md" />,
});
}
if (capabilities.codeEnabled) {
if (capabilities.codeEnabled && codeAllowedByAgent) {
_options.push({
label: localize('com_ui_upload_code_files'),
value: EToolResources.execute_code,
@@ -58,7 +69,7 @@ const DragDropModal = ({ onOptionSelect, setShowModal, files, isVisible }: DragD
}

return _options;
}, [capabilities, files, localize]);
}, [capabilities, files, localize, fileSearchAllowedByAgent, codeAllowedByAgent]);

if (!isVisible) {
return null;

@@ -1,62 +1,102 @@
export default function DragDropOverlay() {
return (
<div
className="bg-surface-primary/85 fixed inset-0 z-[9999] flex flex-col items-center justify-center
gap-2 text-text-primary
backdrop-blur-[4px] transition-all duration-200
ease-in-out animate-in fade-in
zoom-in-95 hover:backdrop-blur-sm"
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 132 108"
fill="none"
width="132"
height="108"
>
<g clipPath="url(#clip0_3605_64419)">
<path
fillRule="evenodd"
clipRule="evenodd"
d="M25.2025 29.3514C10.778 33.2165 8.51524 37.1357 11.8281 49.4995L13.4846 55.6814C16.7975 68.0453 20.7166 70.308 35.1411 66.443L43.3837 64.2344C57.8082 60.3694 60.0709 56.4502 56.758 44.0864L55.1016 37.9044C51.7887 25.5406 47.8695 23.2778 33.445 27.1428L29.3237 28.2471L25.2025 29.3514ZM18.1944 42.7244C18.8572 41.5764 20.325 41.1831 21.4729 41.8459L27.3517 45.24C28.4996 45.9027 28.8929 47.3706 28.2301 48.5185L24.836 54.3972C24.1733 55.5451 22.7054 55.9384 21.5575 55.2757C20.4096 54.613 20.0163 53.1451 20.6791 51.9972L22.8732 48.1969L19.0729 46.0028C17.925 45.3401 17.5317 43.8723 18.1944 42.7244ZM29.4091 56.3843C29.066 55.104 29.8258 53.7879 31.1062 53.4449L40.3791 50.9602C41.6594 50.6172 42.9754 51.377 43.3184 52.6573C43.6615 53.9376 42.9017 55.2536 41.6214 55.5967L32.3485 58.0813C31.0682 58.4244 29.7522 57.6646 29.4091 56.3843Z"
fill="#AFC1FF"
/>
</g>
<g clipPath="url(#clip1_3605_64419)">
<path
fillRule="evenodd"
clipRule="evenodd"
d="M86.8124 13.4036C81.0973 11.8722 78.5673 13.2649 77.0144 19.0603L68.7322 49.97C67.1793 55.7656 68.5935 58.2151 74.4696 59.7895L97.4908 65.958C103.367 67.5326 105.816 66.1184 107.406 60.1848L115.393 30.379C115.536 29.8456 115.217 29.2959 114.681 29.16C113.478 28.8544 112.435 28.6195 111.542 28.4183C106.243 27.2253 106.22 27.2201 109.449 20.7159C109.73 20.1507 109.426 19.4638 108.816 19.3004L86.8124 13.4036ZM87.2582 28.4311C86.234 28.1567 85.1812 28.7645 84.9067 29.7888C84.6323 30.813 85.2401 31.8658 86.2644 32.1403L101.101 36.1158C102.125 36.3902 103.178 35.7824 103.453 34.7581C103.727 33.7339 103.119 32.681 102.095 32.4066L87.2582 28.4311ZM82.9189 37.2074C83.1934 36.1831 84.2462 35.5753 85.2704 35.8497L100.107 39.8252C101.131 40.0996 101.739 41.1524 101.465 42.1767C101.19 43.201 100.137 43.8088 99.1132 43.5343L84.2766 39.5589C83.2523 39.2844 82.6445 38.2316 82.9189 37.2074ZM83.2826 43.2683C82.2584 42.9939 81.2056 43.6017 80.9311 44.626C80.6567 45.6502 81.2645 46.703 82.2888 46.9775L89.7071 48.9652C90.7313 49.2396 91.7841 48.6318 92.0586 47.6076C92.333 46.5833 91.7252 45.5305 90.7009 45.256L83.2826 43.2683Z"
fill="#7989FF"
/>
</g>
<path
fillRule="evenodd"
clipRule="evenodd"
d="M40.4004 71.8426C40.4004 57.2141 44.0575 53.5569 61.1242 53.5569H66.0004H70.8766C87.9432 53.5569 91.6004 57.2141 91.6004 71.8426V79.1569C91.6004 93.7855 87.9432 97.4426 70.8766 97.4426H61.1242C44.0575 97.4426 40.4004 93.7855 40.4004 79.1569V71.8426ZM78.8002 67.4995C78.8002 70.1504 76.6512 72.2995 74.0002 72.2995C71.3492 72.2995 69.2002 70.1504 69.2002 67.4995C69.2002 64.8485 71.3492 62.6995 74.0002 62.6995C76.6512 62.6995 78.8002 64.8485 78.8002 67.4995ZM60.7204 70.8597C60.2672 70.2553 59.5559 69.8997 58.8004 69.8997C58.045 69.8997 57.3337 70.2553 56.8804 70.8597L47.2804 83.6597C46.4851 84.72 46.7 86.2244 47.7604 87.0197C48.8208 87.8149 50.3251 87.6 51.1204 86.5397L58.8004 76.2997L66.4804 86.5397C66.8979 87.0962 67.5363 87.4443 68.2303 87.4936C68.9243 87.5429 69.6055 87.2887 70.0975 86.7967L74.8004 82.0938L79.5034 86.7967C80.4406 87.734 81.9602 87.734 82.8975 86.7967C83.8347 85.8595 83.8347 84.3399 82.8975 83.4026L76.4975 77.0026C75.5602 76.0653 74.0406 76.0653 73.1034 77.0026L68.6601 81.4459L60.7204 70.8597Z"
fill="#3C46FF"
/>
<defs>
<clipPath id="clip0_3605_64419">
<rect
width="56"
height="56"
fill="white"
transform="translate(0 26.9939) rotate(-15)"
/>
</clipPath>
<clipPath id="clip1_3605_64419">
<rect
width="64"
height="64"
fill="white"
transform="translate(69.5645 0.5) rotate(15)"
/>
</clipPath>
</defs>
</svg>
<h3>Add anything</h3>
<h4>Drop any file here to add it to the conversation</h4>
</div>
);
import { memo } from 'react';
import { useLocalize } from '~/hooks';

interface DragDropOverlayProps {
isActive: boolean;
}

const DragDropOverlay = memo(({ isActive }: DragDropOverlayProps) => {
const localize = useLocalize();
return (
<>
{/** Modal backdrop overlay */}
<div
className={`fixed inset-0 z-[9998] transition-opacity duration-200 ease-in-out ${
isActive
? 'pointer-events-auto visible opacity-100'
: 'pointer-events-none invisible opacity-0'
} `}
style={{
/** Semi-transparent black overlay that works in both themes */
backgroundColor: 'rgba(0, 0, 0, 0.4)',
willChange: 'opacity',
}}
/>
{/** Main content overlay */}
<div
className={`fixed inset-0 z-[9999] flex flex-col items-center justify-center gap-2 text-text-primary transition-all duration-200 ease-in-out ${
isActive
? 'pointer-events-auto visible opacity-100'
: 'pointer-events-none invisible opacity-0'
} `}
style={{
transform: isActive ? 'scale(1)' : 'scale(0.95)',
/** Use will-change to hint browser about upcoming changes */
willChange: 'opacity, transform',
}}
>
{/** Content area with subtle background */}
<div className="bg-surface-primary/95 flex flex-col items-center rounded-lg p-8 shadow-xl">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 132 108"
fill="none"
width="132"
height="108"
style={{
transform: isActive ? 'translateY(0)' : 'translateY(-10px)',
transition: 'transform 0.2s ease-in-out',
}}
>
<g clipPath="url(#clip0_3605_64419)">
<path
fillRule="evenodd"
clipRule="evenodd"
d="M25.2025 29.3514C10.778 33.2165 8.51524 37.1357 11.8281 49.4995L13.4846 55.6814C16.7975 68.0453 20.7166 70.308 35.1411 66.443L43.3837 64.2344C57.8082 60.3694 60.0709 56.4502 56.758 44.0864L55.1016 37.9044C51.7887 25.5406 47.8695 23.2778 33.445 27.1428L29.3237 28.2471L25.2025 29.3514ZM18.1944 42.7244C18.8572 41.5764 20.325 41.1831 21.4729 41.8459L27.3517 45.24C28.4996 45.9027 28.8929 47.3706 28.2301 48.5185L24.836 54.3972C24.1733 55.5451 22.7054 55.9384 21.5575 55.2757C20.4096 54.613 20.0163 53.1451 20.6791 51.9972L22.8732 48.1969L19.0729 46.0028C17.925 45.3401 17.5317 43.8723 18.1944 42.7244ZM29.4091 56.3843C29.066 55.104 29.8258 53.7879 31.1062 53.4449L40.3791 50.9602C41.6594 50.6172 42.9754 51.377 43.3184 52.6573C43.6615 53.9376 42.9017 55.2536 41.6214 55.5967L32.3485 58.0813C31.0682 58.4244 29.7522 57.6646 29.4091 56.3843Z"
fill="#AFC1FF"
/>
</g>
<g clipPath="url(#clip1_3605_64419)">
<path
fillRule="evenodd"
clipRule="evenodd"
d="M86.8124 13.4036C81.0973 11.8722 78.5673 13.2649 77.0144 19.0603L68.7322 49.97C67.1793 55.7656 68.5935 58.2151 74.4696 59.7895L97.4908 65.958C103.367 67.5326 105.816 66.1184 107.406 60.1848L115.393 30.379C115.536 29.8456 115.217 29.2959 114.681 29.16C113.478 28.8544 112.435 28.6195 111.542 28.4183C106.243 27.2253 106.22 27.2201 109.449 20.7159C109.73 20.1507 109.426 19.4638 108.816 19.3004L86.8124 13.4036ZM87.2582 28.4311C86.234 28.1567 85.1812 28.7645 84.9067 29.7888C84.6323 30.813 85.2401 31.8658 86.2644 32.1403L101.101 36.1158C102.125 36.3902 103.178 35.7824 103.453 34.7581C103.727 33.7339 103.119 32.681 102.095 32.4066L87.2582 28.4311ZM82.9189 37.2074C83.1934 36.1831 84.2462 35.5753 85.2704 35.8497L100.107 39.8252C101.131 40.0996 101.739 41.1524 101.465 42.1767C101.19 43.201 100.137 43.8088 99.1132 43.5343L84.2766 39.5589C83.2523 39.2844 82.6445 38.2316 82.9189 37.2074ZM83.2826 43.2683C82.2584 42.9939 81.2056 43.6017 80.9311 44.626C80.6567 45.6502 81.2645 46.703 82.2888 46.9775L89.7071 48.9652C90.7313 49.2396 91.7841 48.6318 92.0586 47.6076C92.333 46.5833 91.7252 45.5305 90.7009 45.256L83.2826 43.2683Z"
fill="#7989FF"
/>
</g>
<path
fillRule="evenodd"
clipRule="evenodd"
d="M40.4004 71.8426C40.4004 57.2141 44.0575 53.5569 61.1242 53.5569H66.0004H70.8766C87.9432 53.5569 91.6004 57.2141 91.6004 71.8426V79.1569C91.6004 93.7855 87.9432 97.4426 70.8766 97.4426H61.1242C44.0575 97.4426 40.4004 93.7855 40.4004 79.1569V71.8426ZM78.8002 67.4995C78.8002 70.1504 76.6512 72.2995 74.0002 72.2995C71.3492 72.2995 69.2002 70.1504 69.2002 67.4995C69.2002 64.8485 71.3492 62.6995 74.0002 62.6995C76.6512 62.6995 78.8002 64.8485 78.8002 67.4995ZM60.7204 70.8597C60.2672 70.2553 59.5559 69.8997 58.8004 69.8997C58.045 69.8997 57.3337 70.2553 56.8804 70.8597L47.2804 83.6597C46.4851 84.72 46.7 86.2244 47.7604 87.0197C48.8208 87.8149 50.3251 87.6 51.1204 86.5397L58.8004 76.2997L66.4804 86.5397C66.8979 87.0962 67.5363 87.4443 68.2303 87.4936C68.9243 87.5429 69.6055 87.2887 70.0975 86.7967L74.8004 82.0938L79.5034 86.7967C80.4406 87.734 81.9602 87.734 82.8975 86.7967C83.8347 85.8595 83.8347 84.3399 82.8975 83.4026L76.4975 77.0026C75.5602 76.0653 74.0406 76.0653 73.1034 77.0026L68.6601 81.4459L60.7204 70.8597Z"
fill="#3C46FF"
/>
<defs>
<clipPath id="clip0_3605_64419">
<rect
width="56"
height="56"
fill="white"
transform="translate(0 26.9939) rotate(-15)"
/>
</clipPath>
<clipPath id="clip1_3605_64419">
<rect
width="64"
height="64"
fill="white"
transform="translate(69.5645 0.5) rotate(15)"
/>
</clipPath>
</defs>
</svg>
<h3 className="mt-4 text-lg font-semibold">{localize('com_ui_upload_files')}</h3>
<h4 className="text-sm text-text-secondary">{localize('com_ui_drag_drop')}</h4>
</div>
</div>
</>
);
});

DragDropOverlay.displayName = 'DragDropOverlay';

export default DragDropOverlay;

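Note the shape of the rewrite: the overlay is now always mounted and toggled purely with opacity, visibility, and pointer-events, which avoids mount/unmount churn while files are dragged across the window. Reduced to its essentials, the pattern looks like this (a sketch; the component name and class list are illustrative):

import React from 'react';

/** Stays mounted; only opacity, visibility, and pointer-events change with `isActive`. */
function FadeOverlay({ isActive, children }: { isActive: boolean; children: React.ReactNode }) {
  const visibility = isActive
    ? 'pointer-events-auto visible opacity-100'
    : 'pointer-events-none invisible opacity-0';
  return (
    <div
      className={`fixed inset-0 transition-opacity duration-200 ease-in-out ${visibility}`}
      style={{ willChange: 'opacity' }}
    >
      {children}
    </div>
  );
}

export default FadeOverlay;
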
@@ -17,7 +17,8 @@ export default function DragDropWrapper({ children, className }: DragDropWrapper
return (
<div ref={drop} className={cn('relative flex h-full w-full', className)}>
{children}
{isActive && <DragDropOverlay />}
{/** Always render overlay to avoid mount/unmount overhead */}
<DragDropOverlay isActive={isActive} />
<DragDropModal
files={draggedFiles}
isVisible={showModal}

@@ -3,22 +3,19 @@ import { MultiSelect, MCPIcon } from '@librechat/client';
import MCPServerStatusIcon from '~/components/MCP/MCPServerStatusIcon';
import MCPConfigDialog from '~/components/MCP/MCPConfigDialog';
import { useBadgeRowContext } from '~/Providers';
import { useMCPServerManager } from '~/hooks';

type MCPSelectProps = { conversationId?: string | null };

function MCPSelectContent({ conversationId }: MCPSelectProps) {
function MCPSelectContent() {
const { conversationId, mcpServerManager } = useBadgeRowContext();
const {
configuredServers,
mcpValues,
isPinned,
placeholderText,
batchToggleServers,
getServerStatusIconProps,
getConfigDialogProps,
isInitializing,
localize,
} = useMCPServerManager({ conversationId });
mcpValues,
isInitializing,
placeholderText,
configuredServers,
batchToggleServers,
getConfigDialogProps,
getServerStatusIconProps,
} = mcpServerManager;

const renderSelectedValues = useCallback(
(values: string[], placeholder?: string) => {
@@ -71,14 +68,6 @@ function MCPSelectContent({ conversationId }: MCPSelectProps) {
[getServerStatusIconProps, isInitializing],
);

if ((!mcpValues || mcpValues.length === 0) && !isPinned) {
return null;
}

if (!configuredServers || configuredServers.length === 0) {
return null;
}

const configDialogProps = getConfigDialogProps();

return (
@@ -103,10 +92,15 @@ function MCPSelectContent({ conversationId }: MCPSelectProps) {
);
}

function MCPSelect(props: MCPSelectProps) {
const { mcpServerNames } = useBadgeRowContext();
if ((mcpServerNames?.length ?? 0) === 0) return null;
return <MCPSelectContent {...props} />;
function MCPSelect() {
const { mcpServerManager } = useBadgeRowContext();
const { configuredServers } = mcpServerManager;

if (!configuredServers || configuredServers.length === 0) {
return null;
}

return <MCPSelectContent />;
}

export default memo(MCPSelect);

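The MCP selectors now read one shared mcpServerManager from useBadgeRowContext() instead of each calling useMCPServerManager({ conversationId }) themselves, so the server state is created once per badge row and every consumer sees the same instance. A consumer written against that context looks roughly like this (the component below is a hypothetical illustration, not part of the diff):

import React from 'react';
import { useBadgeRowContext } from '~/Providers';

function MCPSelectionSummary() {
  const { mcpServerManager } = useBadgeRowContext();
  const { configuredServers, mcpValues } = mcpServerManager;

  // Same early-exit rule the real MCPSelect uses: nothing to render without configured servers
  if (!configuredServers || configuredServers.length === 0) {
    return null;
  }

  return <span>{`${mcpValues?.length ?? 0} of ${configuredServers.length} MCP servers selected`}</span>;
}

export default MCPSelectionSummary;
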
@@ -4,27 +4,27 @@ import { ChevronRight } from 'lucide-react';
import { PinIcon, MCPIcon } from '@librechat/client';
import MCPServerStatusIcon from '~/components/MCP/MCPServerStatusIcon';
import MCPConfigDialog from '~/components/MCP/MCPConfigDialog';
import { useMCPServerManager } from '~/hooks';
import { useBadgeRowContext } from '~/Providers';
import { cn } from '~/utils';

interface MCPSubMenuProps {
placeholder?: string;
conversationId?: string | null;
}

const MCPSubMenu = React.forwardRef<HTMLDivElement, MCPSubMenuProps>(
({ placeholder, conversationId, ...props }, ref) => {
({ placeholder, ...props }, ref) => {
const { mcpServerManager } = useBadgeRowContext();
const {
configuredServers,
mcpValues,
isPinned,
mcpValues,
setIsPinned,
isInitializing,
placeholderText,
configuredServers,
getConfigDialogProps,
toggleServerSelection,
getServerStatusIconProps,
getConfigDialogProps,
isInitializing,
} = useMCPServerManager({ conversationId });
} = mcpServerManager;

const menuStore = Ariakit.useMenuStore({
focusLoop: true,

@@ -30,8 +30,7 @@ const ToolsDropdown = ({ disabled }: ToolsDropdownProps) => {
artifacts,
fileSearch,
agentsConfig,
mcpServerNames,
conversationId,
mcpServerManager,
codeApiKeyForm,
codeInterpreter,
searchApiKeyForm,
@@ -287,15 +286,18 @@ const ToolsDropdown = ({ disabled }: ToolsDropdownProps) => {
});
}

if (mcpServerNames && mcpServerNames.length > 0) {
const { configuredServers } = mcpServerManager;
if (configuredServers && configuredServers.length > 0) {
dropdownItems.push({
hideOnClick: false,
render: (props) => (
<MCPSubMenu {...props} placeholder={mcpPlaceholder} conversationId={conversationId} />
),
render: (props) => <MCPSubMenu {...props} placeholder={mcpPlaceholder} />,
});
}

if (dropdownItems.length === 0) {
return null;
}

const menuTrigger = (
<TooltipAnchor
render={

@@ -26,6 +26,7 @@ type ContentPartsProps = {
isCreatedByUser: boolean;
isLast: boolean;
isSubmitting: boolean;
isLatestMessage?: boolean;
edit?: boolean;
enterEdit?: (cancel?: boolean) => void | null | undefined;
siblingIdx?: number;
@@ -45,6 +46,7 @@ const ContentParts = memo(
isCreatedByUser,
isLast,
isSubmitting,
isLatestMessage,
edit,
enterEdit,
siblingIdx,
@@ -55,6 +57,8 @@ const ContentParts = memo(
const [isExpanded, setIsExpanded] = useState(showThinking);
const attachmentMap = useMemo(() => mapAttachments(attachments ?? []), [attachments]);

const effectiveIsSubmitting = isLatestMessage ? isSubmitting : false;

const hasReasoningParts = useMemo(() => {
const hasThinkPart = content?.some((part) => part?.type === ContentTypes.THINK) ?? false;
const allThinkPartsHaveContent =
@@ -134,7 +138,9 @@ const ContentParts = memo(
})
}
label={
isSubmitting && isLast ? localize('com_ui_thinking') : localize('com_ui_thoughts')
effectiveIsSubmitting && isLast
? localize('com_ui_thinking')
: localize('com_ui_thoughts')
}
/>
</div>
@@ -155,12 +161,14 @@ const ContentParts = memo(
conversationId,
partIndex: idx,
nextType: content[idx + 1]?.type,
isSubmitting: effectiveIsSubmitting,
isLatestMessage,
}}
>
<Part
part={part}
attachments={attachments}
isSubmitting={isSubmitting}
isSubmitting={effectiveIsSubmitting}
key={`part-${messageId}-${idx}`}
isCreatedByUser={isCreatedByUser}
isLast={idx === content.length - 1}

@@ -4,7 +4,7 @@ import { useRecoilState, useRecoilValue } from 'recoil';
import { TextareaAutosize, TooltipAnchor } from '@librechat/client';
import { useUpdateMessageMutation } from 'librechat-data-provider/react-query';
import type { TEditProps } from '~/common';
import { useChatContext, useAddedChatContext } from '~/Providers';
import { useMessagesOperations, useMessagesConversation, useAddedChatContext } from '~/Providers';
import { cn, removeFocusRings } from '~/utils';
import { useLocalize } from '~/hooks';
import Container from './Container';
@@ -22,7 +22,8 @@ const EditMessage = ({
const { addedIndex } = useAddedChatContext();
const saveButtonRef = useRef<HTMLButtonElement | null>(null);
const submitButtonRef = useRef<HTMLButtonElement | null>(null);
const { getMessages, setMessages, conversation } = useChatContext();
const { conversation } = useMessagesConversation();
const { getMessages, setMessages } = useMessagesOperations();
const [latestMultiMessage, setLatestMultiMessage] = useRecoilState(
store.latestMessageFamily(addedIndex),
);

@@ -5,7 +5,7 @@ import type { TMessage } from 'librechat-data-provider';
import type { TMessageContentProps, TDisplayProps } from '~/common';
import Error from '~/components/Messages/Content/Error';
import Thinking from '~/components/Artifacts/Thinking';
import { useChatContext } from '~/Providers';
import { useMessageContext } from '~/Providers';
import MarkdownLite from './MarkdownLite';
import EditMessage from './EditMessage';
import { useLocalize } from '~/hooks';
@@ -70,16 +70,12 @@ export const ErrorMessage = ({
};

const DisplayMessage = ({ text, isCreatedByUser, message, showCursor }: TDisplayProps) => {
const { isSubmitting, latestMessage } = useChatContext();
const { isSubmitting = false, isLatestMessage = false } = useMessageContext();
const enableUserMsgMarkdown = useRecoilValue(store.enableUserMsgMarkdown);
const showCursorState = useMemo(
() => showCursor === true && isSubmitting,
[showCursor, isSubmitting],
);
const isLatestMessage = useMemo(
() => message.messageId === latestMessage?.messageId,
[message.messageId, latestMessage?.messageId],
);

let content: React.ReactElement;
if (!isCreatedByUser) {

@@ -85,13 +85,14 @@ const Part = memo(

const isToolCall =
'args' in toolCall && (!toolCall.type || toolCall.type === ToolCallTypes.TOOL_CALL);
if (isToolCall && toolCall.name === Tools.execute_code && toolCall.args) {
if (isToolCall && toolCall.name === Tools.execute_code) {
return (
<ExecuteCode
args={typeof toolCall.args === 'string' ? toolCall.args : ''}
attachments={attachments}
isSubmitting={isSubmitting}
output={toolCall.output ?? ''}
initialProgress={toolCall.progress ?? 0.1}
attachments={attachments}
args={typeof toolCall.args === 'string' ? toolCall.args : ''}
/>
);
} else if (

@@ -6,8 +6,8 @@ import { useRecoilState, useRecoilValue } from 'recoil';
import { useUpdateMessageContentMutation } from 'librechat-data-provider/react-query';
import type { Agents } from 'librechat-data-provider';
import type { TEditProps } from '~/common';
import { useMessagesOperations, useMessagesConversation, useAddedChatContext } from '~/Providers';
import Container from '~/components/Chat/Messages/Content/Container';
import { useChatContext, useAddedChatContext } from '~/Providers';
import { cn, removeFocusRings } from '~/utils';
import { useLocalize } from '~/hooks';
import store from '~/store';
@@ -25,7 +25,8 @@ const EditTextPart = ({
}) => {
const localize = useLocalize();
const { addedIndex } = useAddedChatContext();
const { ask, getMessages, setMessages, conversation } = useChatContext();
const { conversation } = useMessagesConversation();
const { ask, getMessages, setMessages } = useMessagesOperations();
const [latestMultiMessage, setLatestMultiMessage] = useRecoilState(
store.latestMessageFamily(addedIndex),
);

@@ -45,26 +45,28 @@ export function useParseArgs(args?: string): ParsedArgs | null {
}

export default function ExecuteCode({
isSubmitting,
initialProgress = 0.1,
args,
output = '',
attachments,
}: {
initialProgress: number;
isSubmitting: boolean;
args?: string;
output?: string;
attachments?: TAttachment[];
}) {
const localize = useLocalize();
const showAnalysisCode = useRecoilValue(store.showCode);
const [showCode, setShowCode] = useState(showAnalysisCode);
const codeContentRef = useRef<HTMLDivElement>(null);
const [contentHeight, setContentHeight] = useState<number | undefined>(0);
const [isAnimating, setIsAnimating] = useState(false);
const hasOutput = output.length > 0;
const outputRef = useRef<string>(output);
const prevShowCodeRef = useRef<boolean>(showCode);
const codeContentRef = useRef<HTMLDivElement>(null);
const [isAnimating, setIsAnimating] = useState(false);
const showAnalysisCode = useRecoilValue(store.showCode);
const [showCode, setShowCode] = useState(showAnalysisCode);
const [contentHeight, setContentHeight] = useState<number | undefined>(0);

const prevShowCodeRef = useRef<boolean>(showCode);
const { lang, code } = useParseArgs(args) ?? ({} as ParsedArgs);
const progress = useProgress(initialProgress);

@@ -136,6 +138,8 @@ export default function ExecuteCode({
};
}, [showCode, isAnimating]);

const cancelled = !isSubmitting && progress < 1;

return (
<>
<div className="relative my-2.5 flex size-5 shrink-0 items-center gap-2.5">
@@ -143,9 +147,12 @@ export default function ExecuteCode({
progress={progress}
onClick={() => setShowCode((prev) => !prev)}
inProgressText={localize('com_ui_analyzing')}
finishedText={localize('com_ui_analyzing_finished')}
finishedText={
cancelled ? localize('com_ui_cancelled') : localize('com_ui_analyzing_finished')
}
hasInput={!!code?.length}
isExpanded={showCode}
error={cancelled}
/>
</div>
<div

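The last ExecuteCode hunks derive a cancelled flag from the progress value and the submission state, then thread it into the progress header so an interrupted run is labeled as cancelled rather than finished. Condensed into one helper (a sketch; only the localize keys come from the diff):

function getAnalysisLabel(
  isSubmitting: boolean,
  progress: number,
  localize: (key: string) => string,
): string {
  // A run that is no longer submitting but never reached full progress was cancelled
  const cancelled = !isSubmitting && progress < 1;
  if (isSubmitting) {
    return localize('com_ui_analyzing');
  }
  return cancelled ? localize('com_ui_cancelled') : localize('com_ui_analyzing_finished');
}
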
Some files were not shown because too many files have changed in this diff.